after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
settings_path = Path(settings_path)
if settings_path.exists() and merge: # pragma: no cover
object_merge(
json.load(
io.open(
str(settings_path), encoding=default_settings.ENCODING_FOR_DYNACONF
)
),
settings_data,
)
json.dump(
settings_data,
io.open(
str(settings_path), "w", encoding=default_settings.ENCODING_FOR_DYNACONF
),
)
|
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
if settings_path.exists() and merge: # pragma: no cover
object_merge(
json.load(
io.open(
str(settings_path), encoding=default_settings.ENCODING_FOR_DYNACONF
)
),
settings_data,
)
json.dump(
settings_data,
io.open(
str(settings_path), "w", encoding=default_settings.ENCODING_FOR_DYNACONF
),
)
|
https://github.com/rochacbruno/dynaconf/issues/129
|
In [1]: from dynaconf import settings
In [2]: settings.POTATO
Out[2]: 'test'
In [3]: settings.get_fresh('POTATO')
In [4]: settings.POTATO
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-4-22d467a777a4> in <module>
----> 1 settings.POTATO
~/miniconda3/lib/python3.7/site-packages/dynaconf/base.py in __getattr__(self, name)
95 raise AttributeError(
96 "Attribute %s was deleted, "
---> 97 "or belongs to different env" % name
98 )
99 if (
AttributeError: Attribute POTATO was deleted, or belongs to different env
|
AttributeError
|
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
settings_path = Path(settings_path)
if settings_path.exists() and merge: # pragma: no cover
existing = DynaconfDict()
load(existing, str(settings_path))
object_merge(existing, settings_data)
with io.open(
str(settings_path), "w", encoding=default_settings.ENCODING_FOR_DYNACONF
) as f:
f.writelines(
["{} = {}\n".format(k.upper(), repr(v)) for k, v in settings_data.items()]
)
|
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
if settings_path.exists() and merge: # pragma: no cover
existing = DynaconfDict()
load(existing, str(settings_path))
object_merge(existing, settings_data)
with io.open(
str(settings_path), "w", encoding=default_settings.ENCODING_FOR_DYNACONF
) as f:
f.writelines(
["{} = {}\n".format(k.upper(), repr(v)) for k, v in settings_data.items()]
)
|
https://github.com/rochacbruno/dynaconf/issues/129
|
In [1]: from dynaconf import settings
In [2]: settings.POTATO
Out[2]: 'test'
In [3]: settings.get_fresh('POTATO')
In [4]: settings.POTATO
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-4-22d467a777a4> in <module>
----> 1 settings.POTATO
~/miniconda3/lib/python3.7/site-packages/dynaconf/base.py in __getattr__(self, name)
95 raise AttributeError(
96 "Attribute %s was deleted, "
---> 97 "or belongs to different env" % name
98 )
99 if (
AttributeError: Attribute POTATO was deleted, or belongs to different env
|
AttributeError
|
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
settings_path = Path(settings_path)
if settings_path.exists() and merge: # pragma: no cover
object_merge(
toml.load(
io.open(
str(settings_path), encoding=default_settings.ENCODING_FOR_DYNACONF
)
),
settings_data,
)
toml.dump(
settings_data,
io.open(
str(settings_path), "w", encoding=default_settings.ENCODING_FOR_DYNACONF
),
)
|
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
if settings_path.exists() and merge: # pragma: no cover
object_merge(
toml.load(
io.open(
str(settings_path), encoding=default_settings.ENCODING_FOR_DYNACONF
)
),
settings_data,
)
toml.dump(
settings_data,
io.open(
str(settings_path), "w", encoding=default_settings.ENCODING_FOR_DYNACONF
),
)
|
https://github.com/rochacbruno/dynaconf/issues/129
|
In [1]: from dynaconf import settings
In [2]: settings.POTATO
Out[2]: 'test'
In [3]: settings.get_fresh('POTATO')
In [4]: settings.POTATO
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-4-22d467a777a4> in <module>
----> 1 settings.POTATO
~/miniconda3/lib/python3.7/site-packages/dynaconf/base.py in __getattr__(self, name)
95 raise AttributeError(
96 "Attribute %s was deleted, "
---> 97 "or belongs to different env" % name
98 )
99 if (
AttributeError: Attribute POTATO was deleted, or belongs to different env
|
AttributeError
|
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
settings_path = Path(settings_path)
if settings_path.exists() and merge: # pragma: no cover
object_merge(
yaml.load(
io.open(
str(settings_path), encoding=default_settings.ENCODING_FOR_DYNACONF
)
),
settings_data,
)
yaml.dump(
settings_data,
io.open(
str(settings_path), "w", encoding=default_settings.ENCODING_FOR_DYNACONF
),
)
|
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
if settings_path.exists() and merge: # pragma: no cover
object_merge(
yaml.load(
io.open(
str(settings_path), encoding=default_settings.ENCODING_FOR_DYNACONF
)
),
settings_data,
)
yaml.dump(
settings_data,
io.open(
str(settings_path), "w", encoding=default_settings.ENCODING_FOR_DYNACONF
),
)
|
https://github.com/rochacbruno/dynaconf/issues/129
|
In [1]: from dynaconf import settings
In [2]: settings.POTATO
Out[2]: 'test'
In [3]: settings.get_fresh('POTATO')
In [4]: settings.POTATO
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-4-22d467a777a4> in <module>
----> 1 settings.POTATO
~/miniconda3/lib/python3.7/site-packages/dynaconf/base.py in __getattr__(self, name)
95 raise AttributeError(
96 "Attribute %s was deleted, "
---> 97 "or belongs to different env" % name
98 )
99 if (
AttributeError: Attribute POTATO was deleted, or belongs to different env
|
AttributeError
|
def settings_module(self):
"""Gets SETTINGS_MODULE variable"""
settings_module = os.environ.get(
self.ENVVAR_FOR_DYNACONF, self.SETTINGS_MODULE_FOR_DYNACONF
)
if settings_module != getattr(self, "SETTINGS_MODULE", None):
self.set("SETTINGS_MODULE", settings_module)
return self.SETTINGS_MODULE
|
def settings_module(self):
"""Gets SETTINGS_MODULE variable"""
settings_module = os.environ.get(
self.ENVVAR_FOR_DYNACONF, self.SETTINGS_MODULE_FOR_DYNACONF
)
if settings_module != self.SETTINGS_MODULE:
self.set("SETTINGS_MODULE", settings_module)
return self.SETTINGS_MODULE
|
https://github.com/rochacbruno/dynaconf/issues/94
|
development
var1_dev
var2_dev
production
Traceback (most recent call last):
File "/home/hch/PycharmProjects/dynaconf/iia/my_project/main.py", line 13, in <module>
settings.setenv('production')
File "/home/hch/.virtualenvs/dynaconf/lib/python3.6/site-packages/dynaconf/base.py", line 459, in setenv
self.execute_loaders(env=env, silent=silent, filename=filename)
File "/home/hch/.virtualenvs/dynaconf/lib/python3.6/site-packages/dynaconf/base.py", line 572, in execute_loaders
filename=filename)
File "/home/hch/.virtualenvs/dynaconf/lib/python3.6/site-packages/dynaconf/loaders/__init__.py", line 53, in settings_loader
settings_module = settings_module or obj.settings_module
File "/home/hch/.virtualenvs/dynaconf/lib/python3.6/site-packages/dynaconf/base.py", line 413, in settings_module
if settings_module != self.SETTINGS_MODULE:
AttributeError: 'Settings' object has no attribute 'SETTINGS_MODULE'
|
AttributeError
|
def init(self, conf: ConfigTree) -> None:
conf = conf.with_fallback(GlueExtractor.DEFAULT_CONFIG)
self._filters = conf.get(GlueExtractor.FILTER_KEY)
self._connection_name = conf.get(GlueExtractor.CONNECTION_NAME_KEY) or ""
self._is_location_parsing_enabled = conf.get(
GlueExtractor.IS_LOCATION_PARSING_ENABLED_KEY
)
self._glue = boto3.client("glue")
self._extract_iter: Union[None, Iterator] = None
|
def init(self, conf: ConfigTree) -> None:
conf = conf.with_fallback(GlueExtractor.DEFAULT_CONFIG)
self._filters = conf.get(GlueExtractor.FILTER_KEY)
self._glue = boto3.client("glue")
self._extract_iter: Union[None, Iterator] = None
self._connection_name = conf.get(GlueExtractor.CONNECTION_NAME_KEY, None) or ""
|
https://github.com/dataframehq/whale/issues/90
|
Traceback (most recent call last):
File "/usr/local/Cellar/whale/v1.1.4/bin/../libexec/build_script.py", line 23, in <module>
pull()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/utils/task_wrappers.py", line 113, in pull
task.run()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/task/__init__.py", line 23, in run
record = self.extractor.extract()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 28, in extract
return next(self._extract_iter)
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 52, in _get_extract_iter
catalog, schema, table = self._parse_location(
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 91, in _parse_location
table = splits[-1]
IndexError: list index out of range
|
IndexError
|
def _get_extract_iter(self) -> Iterator[TableMetadata]:
for row in self._get_raw_extract_iter():
columns, i = [], 0
for column in row["StorageDescriptor"]["Columns"] + row.get(
"PartitionKeys", []
):
columns.append(
ColumnMetadata(
column["Name"],
column["Comment"] if "Comment" in column else None,
column["Type"],
i,
)
)
i += 1
if self._is_location_parsing_enabled:
catalog, schema, table = self._parse_location(
location=row["StorageDescriptor"]["Location"], name=row["Name"]
)
else:
catalog = None
schema = None
table = row["Name"]
if self._connection_name:
database = self._connection_name + "/" + row["DatabaseName"]
else:
database = row["DatabaseName"]
yield TableMetadata(
database,
catalog,
schema,
table,
row.get("Description") or row.get("Parameters", {}).get("comment"),
columns,
row.get("TableType") == "VIRTUAL_VIEW",
)
|
def _get_extract_iter(self) -> Iterator[TableMetadata]:
for row in self._get_raw_extract_iter():
columns, i = [], 0
for column in row["StorageDescriptor"]["Columns"] + row.get(
"PartitionKeys", []
):
columns.append(
ColumnMetadata(
column["Name"],
column["Comment"] if "Comment" in column else None,
column["Type"],
i,
)
)
i += 1
catalog, schema, table = self._parse_location(
location=row["StorageDescriptor"]["Location"], name=row["Name"]
)
if self._connection_name:
database = self._connection_name + "/" + row["DatabaseName"]
else:
database = row["DatabaseName"]
yield TableMetadata(
database,
catalog,
schema,
table,
row.get("Description") or row.get("Parameters", {}).get("comment"),
columns,
row.get("TableType") == "VIRTUAL_VIEW",
)
|
https://github.com/dataframehq/whale/issues/90
|
Traceback (most recent call last):
File "/usr/local/Cellar/whale/v1.1.4/bin/../libexec/build_script.py", line 23, in <module>
pull()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/utils/task_wrappers.py", line 113, in pull
task.run()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/task/__init__.py", line 23, in run
record = self.extractor.extract()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 28, in extract
return next(self._extract_iter)
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 52, in _get_extract_iter
catalog, schema, table = self._parse_location(
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 91, in _parse_location
table = splits[-1]
IndexError: list index out of range
|
IndexError
|
def __init__(
self,
metadata_source: str,
dialect: Optional[str] = None,
uri: Optional[str] = None,
port: Optional[int] = None,
username: Optional[str] = None,
password: Optional[str] = None,
name: Optional[str] = None,
database: Optional[str] = None,
instance: Optional[str] = None,
cluster: Optional[str] = None,
is_location_parsing_enabled: bool = False,
included_schemas: List = [],
excluded_schemas: List = [],
included_keys: Optional[List[str]] = None,
excluded_keys: Optional[List[str]] = None,
included_key_regex: Optional[str] = None,
excluded_key_regex: Optional[str] = None,
included_tables_regex: Optional[str] = None,
build_script_path: Optional[str] = None,
venv_path: Optional[str] = None,
python_binary: Optional[str] = None,
key_path: Optional[str] = None,
project_id: Optional[str] = None,
project_credentials: Optional[str] = None,
page_size: Optional[str] = None,
filter_key: Optional[str] = None,
where_clause_suffix: Optional[str] = "",
**kwargs,
):
self.uri = uri
self.port = port
if metadata_source is not None:
metadata_source = metadata_source.lower()
self.metadata_source = metadata_source
self.dialect = dialect
self.username = username
self.password = password
self.name = name
self.database = database
self.instance = instance
self.cluster = cluster
self.is_location_parsing_enabled = is_location_parsing_enabled
self.included_schemas = included_schemas
self.excluded_schemas = excluded_schemas
self.included_keys = included_keys
self.excluded_keys = included_keys
self.included_key_regex = included_key_regex
self.excluded_key_regex = excluded_key_regex
self.included_tables_regex = included_tables_regex
self.build_script_path = build_script_path
self.venv_path = venv_path
self.python_binary = python_binary
self.key_path = key_path
self.project_id = project_id
self.key_path = key_path
self.project_credentials = project_credentials
self.page_size = page_size
self.filter_key = filter_key
self.where_clause_suffix = where_clause_suffix
self.infer_conn_string()
|
def __init__(
self,
metadata_source: str,
dialect: Optional[str] = None,
uri: Optional[str] = None,
port: Optional[int] = None,
username: Optional[str] = None,
password: Optional[str] = None,
name: Optional[str] = None,
database: Optional[str] = None,
instance: Optional[str] = None,
cluster: Optional[str] = None,
included_schemas: List = [],
excluded_schemas: List = [],
included_keys: Optional[List[str]] = None,
excluded_keys: Optional[List[str]] = None,
included_key_regex: Optional[str] = None,
excluded_key_regex: Optional[str] = None,
included_tables_regex: Optional[str] = None,
build_script_path: Optional[str] = None,
venv_path: Optional[str] = None,
python_binary: Optional[str] = None,
key_path: Optional[str] = None,
project_id: Optional[str] = None,
project_credentials: Optional[str] = None,
page_size: Optional[str] = None,
filter_key: Optional[str] = None,
where_clause_suffix: Optional[str] = "",
**kwargs,
):
self.uri = uri
self.port = port
if metadata_source is not None:
metadata_source = metadata_source.lower()
self.metadata_source = metadata_source
self.dialect = dialect
self.username = username
self.password = password
self.name = name
self.database = database
self.instance = instance
self.cluster = cluster
self.included_schemas = included_schemas
self.excluded_schemas = excluded_schemas
self.included_keys = included_keys
self.excluded_keys = included_keys
self.included_key_regex = included_key_regex
self.excluded_key_regex = excluded_key_regex
self.included_tables_regex = included_tables_regex
self.build_script_path = build_script_path
self.venv_path = venv_path
self.python_binary = python_binary
self.key_path = key_path
self.project_id = project_id
self.key_path = key_path
self.project_credentials = project_credentials
self.page_size = page_size
self.filter_key = filter_key
self.where_clause_suffix = where_clause_suffix
self.infer_conn_string()
|
https://github.com/dataframehq/whale/issues/90
|
Traceback (most recent call last):
File "/usr/local/Cellar/whale/v1.1.4/bin/../libexec/build_script.py", line 23, in <module>
pull()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/utils/task_wrappers.py", line 113, in pull
task.run()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/task/__init__.py", line 23, in run
record = self.extractor.extract()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 28, in extract
return next(self._extract_iter)
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 52, in _get_extract_iter
catalog, schema, table = self._parse_location(
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 91, in _parse_location
table = splits[-1]
IndexError: list index out of range
|
IndexError
|
def format_table_metadata(self, record) -> metadata_model_whale.TableMetadata:
block_template = textwrap.dedent(
""" # `{schema_statement}{name}`{view_statement}
`{database}`{cluster_statement}
{description}
{column_details_delimiter}
{columns}
"""
)
formatted_columns = self.format_columns(record)
if record.description:
if type(record.description) == DescriptionMetadata:
description = record.description._text + "\n"
else:
description = str(record.description) + "\n"
else:
description = ""
if record.cluster == "None": # edge case for Hive Metastore
cluster = None
else:
cluster = record.cluster
if cluster is not None:
cluster_statement = f" | `{cluster}`"
else:
cluster_statement = ""
if (
record.schema == None
): # edge case for Glue, which puts everything in record.table
schema_statement = ""
else:
schema_statement = f"{record.schema}."
markdown_blob = block_template.format(
schema_statement=schema_statement,
name=record.name,
view_statement=" [view]" if record.is_view else "",
database=record.database,
cluster_statement=cluster_statement,
description=description,
column_details_delimiter=COLUMN_DETAILS_DELIMITER,
columns=formatted_columns,
)
return metadata_model_whale.TableMetadata(
database=record.database,
cluster=record.cluster,
schema=record.schema,
name=record.name,
markdown_blob=markdown_blob,
)
|
def format_table_metadata(self, record) -> metadata_model_whale.TableMetadata:
block_template = textwrap.dedent(
""" # `{schema}.{name}`{view_statement}
`{database}`{cluster_statement}
{description}
{column_details_delimiter}
{columns}
"""
)
formatted_columns = self.format_columns(record)
if record.description:
if type(record.description) == DescriptionMetadata:
description = record.description._text + "\n"
else:
description = str(record.description) + "\n"
else:
description = ""
if record.cluster == "None": # edge case for Hive Metastore
cluster = None
else:
cluster = record.cluster
if cluster is not None:
cluster_statement = f"| `{cluster}`"
else:
cluster_statement = ""
markdown_blob = block_template.format(
schema=record.schema,
name=record.name,
view_statement=" [view]" if record.is_view else "",
database=record.database,
cluster_statement=cluster_statement,
description=description,
column_details_delimiter=COLUMN_DETAILS_DELIMITER,
columns=formatted_columns,
)
return metadata_model_whale.TableMetadata(
database=record.database,
cluster=record.cluster,
schema=record.schema,
name=record.name,
markdown_blob=markdown_blob,
)
|
https://github.com/dataframehq/whale/issues/90
|
Traceback (most recent call last):
File "/usr/local/Cellar/whale/v1.1.4/bin/../libexec/build_script.py", line 23, in <module>
pull()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/utils/task_wrappers.py", line 113, in pull
task.run()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/task/__init__.py", line 23, in run
record = self.extractor.extract()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 28, in extract
return next(self._extract_iter)
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 52, in _get_extract_iter
catalog, schema, table = self._parse_location(
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 91, in _parse_location
table = splits[-1]
IndexError: list index out of range
|
IndexError
|
def get_table_file_path_relative(database, cluster, schema, table):
if cluster is not None:
relative_file_path = TABLE_RELATIVE_FILE_PATH.format(
database=database, cluster=cluster, schema=schema, table=table
)
else:
if schema is not None:
relative_file_path = CLUSTERLESS_TABLE_RELATIVE_FILE_PATH.format(
database=database, schema=schema, table=table
)
else:
relative_file_path = SCHEMALESS_TABLE_RELATIVE_FILE_PATH.format(
database=database, table=table
)
return relative_file_path
|
def get_table_file_path_relative(database, cluster, schema, table):
if cluster is not None:
relative_file_path = TABLE_RELATIVE_FILE_PATH.format(
database=database, cluster=cluster, schema=schema, table=table
)
else:
relative_file_path = CLUSTERLESS_TABLE_RELATIVE_FILE_PATH.format(
database=database, schema=schema, table=table
)
return relative_file_path
|
https://github.com/dataframehq/whale/issues/90
|
Traceback (most recent call last):
File "/usr/local/Cellar/whale/v1.1.4/bin/../libexec/build_script.py", line 23, in <module>
pull()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/utils/task_wrappers.py", line 113, in pull
task.run()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/task/__init__.py", line 23, in run
record = self.extractor.extract()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 28, in extract
return next(self._extract_iter)
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 52, in _get_extract_iter
catalog, schema, table = self._parse_location(
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 91, in _parse_location
table = splits[-1]
IndexError: list index out of range
|
IndexError
|
def get_table_info_from_path(
file_path,
):
database = os.path.dirname(file_path)
table_string = str(file_path).split(database + "/")[-1]
database = str(database).split("/")[-1]
table_components = table_string.split(".")
table = table_components[-2]
if len(table_components) == 4:
cluster = table_components[-4]
schema = table_components[-3]
elif len(table_components) == 3:
cluster = None
schema = table_components[-3]
else:
cluster = None
schema = None
return database, cluster, schema, table
|
def get_table_info_from_path(
file_path,
):
database = os.path.dirname(file_path)
table_string = str(file_path).split(database + "/")[-1]
database = str(database).split("/")[-1]
table_components = table_string.split(".")
table = table_components[-2]
schema = table_components[-3]
if len(table_components) == 4:
cluster = table_components[-4]
else:
cluster = None
return database, cluster, schema, table
|
https://github.com/dataframehq/whale/issues/90
|
Traceback (most recent call last):
File "/usr/local/Cellar/whale/v1.1.4/bin/../libexec/build_script.py", line 23, in <module>
pull()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/utils/task_wrappers.py", line 113, in pull
task.run()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/task/__init__.py", line 23, in run
record = self.extractor.extract()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 28, in extract
return next(self._extract_iter)
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 52, in _get_extract_iter
catalog, schema, table = self._parse_location(
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 91, in _parse_location
table = splits[-1]
IndexError: list index out of range
|
IndexError
|
def configure_glue_extractors(connection: ConnectionConfigSchema):
Extractor = GlueExtractor
extractor = Extractor()
scope = extractor.get_scope()
conf = ConfigFactory.from_dict(
{
f"{scope}.{Extractor.CONNECTION_NAME_KEY}": connection.name,
f"{scope}.{Extractor.FILTER_KEY}": connection.filter_key,
f"{scope}.{Extractor.IS_LOCATION_PARSING_ENABLED_KEY}": connection.is_location_parsing_enabled,
}
)
extractors = [extractor]
return extractors, conf
|
def configure_glue_extractors(connection: ConnectionConfigSchema):
Extractor = GlueExtractor
extractor = Extractor()
scope = extractor.get_scope()
conf = ConfigFactory.from_dict(
{
f"{scope}.{Extractor.CONNECTION_NAME_KEY}": connection.name,
f"{scope}.{Extractor.FILTER_KEY}": connection.filter_key,
}
)
extractors = [extractor]
return extractors, conf
|
https://github.com/dataframehq/whale/issues/90
|
Traceback (most recent call last):
File "/usr/local/Cellar/whale/v1.1.4/bin/../libexec/build_script.py", line 23, in <module>
pull()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/utils/task_wrappers.py", line 113, in pull
task.run()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/task/__init__.py", line 23, in run
record = self.extractor.extract()
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 28, in extract
return next(self._extract_iter)
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 52, in _get_extract_iter
catalog, schema, table = self._parse_location(
File "/usr/local/Cellar/whale/v1.1.4/libexec/env/lib/python3.8/site-packages/whale/extractor/glue_extractor.py", line 91, in _parse_location
table = splits[-1]
IndexError: list index out of range
|
IndexError
|
def step(title):
if callable(title):
return StepContext(title.__name__, ({}, {}))(title)
else:
return StepContext(title, ({}, {}))
|
def step(title):
if callable(title):
return StepContext(title.__name__, [])(title)
else:
return StepContext(title, [])
|
https://github.com/allure-framework/allure-python/issues/100
|
$ pytest test_3.py --alluredir areport4
========================================================================= test session starts ==========================================================================
platform linux2 -- Python 2.7.8, pytest-3.1.3, py-1.4.34, pluggy-0.4.0
metadata: {'Python': '2.7.8', 'Platform': 'Linux-4.4.0-85-generic-x86_64-with-Ubuntu-14.04-trusty', 'Packages': {'py': '1.4.34', 'pytest': '3.1.3', 'pluggy': '0.4.0'}, 'JAVA_HOME': '/usr/lib/jvm/java-8-oracle', 'Plugins': {'xdist': '1.18.1', 'allure-pytest': '2.1.0b1', 'html': '1.14.2', 'PyTestTelegramReport': '0.0.7', 'instafail': '0.3.0', 'metadata': '1.5.0'}}
rootdir: /home/asdef/Dropbox/Work/Git/PyTestError, inifile:
plugins: xdist-1.18.1, metadata-1.5.0, instafail-0.3.0, html-1.14.2, allure-pytest-2.1.0b1, PyTestTelegramReport-0.0.7
collected 1 item s
test_3.py .
======================================================================= 1 passed in 0.70 seconds =======================================================================
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError$ cd /home/asdef/Dropbox/Work/Git/PyTestError/areport4
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError/areport4$ cd /home/asdef/Dropbox/Work/Git/PyTestError
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError$ pytest test_3.py --alluredir areport4
========================================================================= test session starts ==========================================================================
platform linux2 -- Python 2.7.8, pytest-3.1.3, py-1.4.34, pluggy-0.4.0
metadata: {'Python': '2.7.8', 'Platform': 'Linux-4.4.0-85-generic-x86_64-with-Ubuntu-14.04-trusty', 'Packages': {'py': '1.4.34', 'pytest': '3.1.3', 'pluggy': '0.4.0'}, 'JAVA_HOME': '/usr/lib/jvm/java-8-oracle', 'Plugins': {'xdist': '1.18.1', 'allure-pytest': '2.1.0b1', 'html': '1.14.2', 'PyTestTelegramReport': '0.0.7', 'instafail': '0.3.0', 'metadata': '1.5.0'}}
rootdir: /home/asdef/Dropbox/Work/Git/PyTestError, inifile:
plugins: xdist-1.18.1, metadata-1.5.0, instafail-0.3.0, html-1.14.2, allure-pytest-2.1.0b1, PyTestTelegramReport-0.0.7
collected 1 item s
test_3.py .
INTERNALERROR> Traceback (most recent call last):
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 105, in wrap_session
INTERNALERROR> session.exitstatus = doit(config, session) or 0
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 141, in _main
INTERNALERROR> config.hook.pytest_runtestloop(session=session)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 745, in __call__
INTERNALERROR> return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 339, in _hookexec
INTERNALERROR> return self._inner_hookexec(hook, methods, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 334, in <lambda>
INTERNALERROR> _MultiCall(methods, kwargs, hook.spec_opts).execute()
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 614, in execute
INTERNALERROR> res = hook_impl.function(*args)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 164, in pytest_runtestloop
INTERNALERROR> item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 745, in __call__
INTERNALERROR> return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 339, in _hookexec
INTERNALERROR> return self._inner_hookexec(hook, methods, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 334, in <lambda>
INTERNALERROR> _MultiCall(methods, kwargs, hook.spec_opts).execute()
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 613, in execute
INTERNALERROR> return _wrapped_call(hook_impl.function(*args), self.execute)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 250, in _wrapped_call
INTERNALERROR> wrap_controller.send(call_outcome)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_pytest/listener.py", line 78, in pytest_runtest_protocol
INTERNALERROR> self.allure_logger.close_test(uuid)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/logger.py", line 67, in close_test
INTERNALERROR> test_case.write(self._report_dir)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/model2.py", line 69, in write
INTERNALERROR> _write(report_dir, self, TEST_CASE_PATTERN)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/model2.py", line 23, in _write
INTERNALERROR> json_file.write(unicode(json.dumps(data, indent=indent, ensure_ascii=False, encoding='utf8')))
INTERNALERROR> File "/usr/lib/python2.7/json/__init__.py", line 250, in dumps
INTERNALERROR> sort_keys=sort_keys, **kw).encode(obj)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 207, in encode
INTERNALERROR> chunks = self.iterencode(o, _one_shot=True)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 270, in iterencode
INTERNALERROR> return _iterencode(o, 0)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 233, in _encoder
INTERNALERROR> o = o.decode(_encoding)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/lib/python2.7/encodings/utf_8.py", line 16, in decode
INTERNALERROR> return codecs.utf_8_decode(input, errors, True)
INTERNALERROR> UnicodeDecodeError: 'utf8' codec can't decode byte 0x89 in position 0: invalid start byte
======================================================================= 1 passed in 0.70 seconds =======================================================================
|
PyTestError
|
def __enter__(self):
args, kwargs = self.params
args.update(kwargs)
params = list(args.items())
plugin_manager.hook.start_step(uuid=self.uuid, title=self.title, params=params)
|
def __enter__(self):
plugin_manager.hook.start_step(uuid=self.uuid, title=self.title, params=self.params)
|
https://github.com/allure-framework/allure-python/issues/100
|
$ pytest test_3.py --alluredir areport4
========================================================================= test session starts ==========================================================================
platform linux2 -- Python 2.7.8, pytest-3.1.3, py-1.4.34, pluggy-0.4.0
metadata: {'Python': '2.7.8', 'Platform': 'Linux-4.4.0-85-generic-x86_64-with-Ubuntu-14.04-trusty', 'Packages': {'py': '1.4.34', 'pytest': '3.1.3', 'pluggy': '0.4.0'}, 'JAVA_HOME': '/usr/lib/jvm/java-8-oracle', 'Plugins': {'xdist': '1.18.1', 'allure-pytest': '2.1.0b1', 'html': '1.14.2', 'PyTestTelegramReport': '0.0.7', 'instafail': '0.3.0', 'metadata': '1.5.0'}}
rootdir: /home/asdef/Dropbox/Work/Git/PyTestError, inifile:
plugins: xdist-1.18.1, metadata-1.5.0, instafail-0.3.0, html-1.14.2, allure-pytest-2.1.0b1, PyTestTelegramReport-0.0.7
collected 1 item s
test_3.py .
======================================================================= 1 passed in 0.70 seconds =======================================================================
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError$ cd /home/asdef/Dropbox/Work/Git/PyTestError/areport4
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError/areport4$ cd /home/asdef/Dropbox/Work/Git/PyTestError
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError$ pytest test_3.py --alluredir areport4
========================================================================= test session starts ==========================================================================
platform linux2 -- Python 2.7.8, pytest-3.1.3, py-1.4.34, pluggy-0.4.0
metadata: {'Python': '2.7.8', 'Platform': 'Linux-4.4.0-85-generic-x86_64-with-Ubuntu-14.04-trusty', 'Packages': {'py': '1.4.34', 'pytest': '3.1.3', 'pluggy': '0.4.0'}, 'JAVA_HOME': '/usr/lib/jvm/java-8-oracle', 'Plugins': {'xdist': '1.18.1', 'allure-pytest': '2.1.0b1', 'html': '1.14.2', 'PyTestTelegramReport': '0.0.7', 'instafail': '0.3.0', 'metadata': '1.5.0'}}
rootdir: /home/asdef/Dropbox/Work/Git/PyTestError, inifile:
plugins: xdist-1.18.1, metadata-1.5.0, instafail-0.3.0, html-1.14.2, allure-pytest-2.1.0b1, PyTestTelegramReport-0.0.7
collected 1 item s
test_3.py .
INTERNALERROR> Traceback (most recent call last):
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 105, in wrap_session
INTERNALERROR> session.exitstatus = doit(config, session) or 0
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 141, in _main
INTERNALERROR> config.hook.pytest_runtestloop(session=session)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 745, in __call__
INTERNALERROR> return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 339, in _hookexec
INTERNALERROR> return self._inner_hookexec(hook, methods, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 334, in <lambda>
INTERNALERROR> _MultiCall(methods, kwargs, hook.spec_opts).execute()
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 614, in execute
INTERNALERROR> res = hook_impl.function(*args)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 164, in pytest_runtestloop
INTERNALERROR> item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 745, in __call__
INTERNALERROR> return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 339, in _hookexec
INTERNALERROR> return self._inner_hookexec(hook, methods, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 334, in <lambda>
INTERNALERROR> _MultiCall(methods, kwargs, hook.spec_opts).execute()
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 613, in execute
INTERNALERROR> return _wrapped_call(hook_impl.function(*args), self.execute)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 250, in _wrapped_call
INTERNALERROR> wrap_controller.send(call_outcome)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_pytest/listener.py", line 78, in pytest_runtest_protocol
INTERNALERROR> self.allure_logger.close_test(uuid)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/logger.py", line 67, in close_test
INTERNALERROR> test_case.write(self._report_dir)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/model2.py", line 69, in write
INTERNALERROR> _write(report_dir, self, TEST_CASE_PATTERN)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/model2.py", line 23, in _write
INTERNALERROR> json_file.write(unicode(json.dumps(data, indent=indent, ensure_ascii=False, encoding='utf8')))
INTERNALERROR> File "/usr/lib/python2.7/json/__init__.py", line 250, in dumps
INTERNALERROR> sort_keys=sort_keys, **kw).encode(obj)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 207, in encode
INTERNALERROR> chunks = self.iterencode(o, _one_shot=True)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 270, in iterencode
INTERNALERROR> return _iterencode(o, 0)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 233, in _encoder
INTERNALERROR> o = o.decode(_encoding)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/lib/python2.7/encodings/utf_8.py", line 16, in decode
INTERNALERROR> return codecs.utf_8_decode(input, errors, True)
INTERNALERROR> UnicodeDecodeError: 'utf8' codec can't decode byte 0x89 in position 0: invalid start byte
======================================================================= 1 passed in 0.70 seconds =======================================================================
|
PyTestError
|
def __call__(self, func):
@wraps(func)
def impl(*a, **kw):
__tracebackhide__ = True
params = func_parameters(func, *a, **kw)
args, kwargs = params
with StepContext(self.title.format(*args.values(), **kwargs), params):
return func(*a, **kw)
return impl
|
def __call__(self, func):
@wraps(func)
def impl(*a, **kw):
__tracebackhide__ = True
params = func_parameters(func, *a, **kw)
with StepContext(self.title.format(*a, **kw), params):
return func(*a, **kw)
return impl
|
https://github.com/allure-framework/allure-python/issues/100
|
$ pytest test_3.py --alluredir areport4
========================================================================= test session starts ==========================================================================
platform linux2 -- Python 2.7.8, pytest-3.1.3, py-1.4.34, pluggy-0.4.0
metadata: {'Python': '2.7.8', 'Platform': 'Linux-4.4.0-85-generic-x86_64-with-Ubuntu-14.04-trusty', 'Packages': {'py': '1.4.34', 'pytest': '3.1.3', 'pluggy': '0.4.0'}, 'JAVA_HOME': '/usr/lib/jvm/java-8-oracle', 'Plugins': {'xdist': '1.18.1', 'allure-pytest': '2.1.0b1', 'html': '1.14.2', 'PyTestTelegramReport': '0.0.7', 'instafail': '0.3.0', 'metadata': '1.5.0'}}
rootdir: /home/asdef/Dropbox/Work/Git/PyTestError, inifile:
plugins: xdist-1.18.1, metadata-1.5.0, instafail-0.3.0, html-1.14.2, allure-pytest-2.1.0b1, PyTestTelegramReport-0.0.7
collected 1 item s
test_3.py .
======================================================================= 1 passed in 0.70 seconds =======================================================================
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError$ cd /home/asdef/Dropbox/Work/Git/PyTestError/areport4
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError/areport4$ cd /home/asdef/Dropbox/Work/Git/PyTestError
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError$ pytest test_3.py --alluredir areport4
========================================================================= test session starts ==========================================================================
platform linux2 -- Python 2.7.8, pytest-3.1.3, py-1.4.34, pluggy-0.4.0
metadata: {'Python': '2.7.8', 'Platform': 'Linux-4.4.0-85-generic-x86_64-with-Ubuntu-14.04-trusty', 'Packages': {'py': '1.4.34', 'pytest': '3.1.3', 'pluggy': '0.4.0'}, 'JAVA_HOME': '/usr/lib/jvm/java-8-oracle', 'Plugins': {'xdist': '1.18.1', 'allure-pytest': '2.1.0b1', 'html': '1.14.2', 'PyTestTelegramReport': '0.0.7', 'instafail': '0.3.0', 'metadata': '1.5.0'}}
rootdir: /home/asdef/Dropbox/Work/Git/PyTestError, inifile:
plugins: xdist-1.18.1, metadata-1.5.0, instafail-0.3.0, html-1.14.2, allure-pytest-2.1.0b1, PyTestTelegramReport-0.0.7
collected 1 item s
test_3.py .
INTERNALERROR> Traceback (most recent call last):
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 105, in wrap_session
INTERNALERROR> session.exitstatus = doit(config, session) or 0
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 141, in _main
INTERNALERROR> config.hook.pytest_runtestloop(session=session)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 745, in __call__
INTERNALERROR> return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 339, in _hookexec
INTERNALERROR> return self._inner_hookexec(hook, methods, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 334, in <lambda>
INTERNALERROR> _MultiCall(methods, kwargs, hook.spec_opts).execute()
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 614, in execute
INTERNALERROR> res = hook_impl.function(*args)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 164, in pytest_runtestloop
INTERNALERROR> item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 745, in __call__
INTERNALERROR> return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 339, in _hookexec
INTERNALERROR> return self._inner_hookexec(hook, methods, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 334, in <lambda>
INTERNALERROR> _MultiCall(methods, kwargs, hook.spec_opts).execute()
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 613, in execute
INTERNALERROR> return _wrapped_call(hook_impl.function(*args), self.execute)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 250, in _wrapped_call
INTERNALERROR> wrap_controller.send(call_outcome)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_pytest/listener.py", line 78, in pytest_runtest_protocol
INTERNALERROR> self.allure_logger.close_test(uuid)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/logger.py", line 67, in close_test
INTERNALERROR> test_case.write(self._report_dir)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/model2.py", line 69, in write
INTERNALERROR> _write(report_dir, self, TEST_CASE_PATTERN)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/model2.py", line 23, in _write
INTERNALERROR> json_file.write(unicode(json.dumps(data, indent=indent, ensure_ascii=False, encoding='utf8')))
INTERNALERROR> File "/usr/lib/python2.7/json/__init__.py", line 250, in dumps
INTERNALERROR> sort_keys=sort_keys, **kw).encode(obj)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 207, in encode
INTERNALERROR> chunks = self.iterencode(o, _one_shot=True)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 270, in iterencode
INTERNALERROR> return _iterencode(o, 0)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 233, in _encoder
INTERNALERROR> o = o.decode(_encoding)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/lib/python2.7/encodings/utf_8.py", line 16, in decode
INTERNALERROR> return codecs.utf_8_decode(input, errors, True)
INTERNALERROR> UnicodeDecodeError: 'utf8' codec can't decode byte 0x89 in position 0: invalid start byte
======================================================================= 1 passed in 0.70 seconds =======================================================================
|
PyTestError
|
def impl(*a, **kw):
__tracebackhide__ = True
params = func_parameters(func, *a, **kw)
args, kwargs = params
with StepContext(self.title.format(*args.values(), **kwargs), params):
return func(*a, **kw)
|
def impl(*a, **kw):
__tracebackhide__ = True
params = func_parameters(func, *a, **kw)
with StepContext(self.title.format(*a, **kw), params):
return func(*a, **kw)
|
https://github.com/allure-framework/allure-python/issues/100
|
$ pytest test_3.py --alluredir areport4
========================================================================= test session starts ==========================================================================
platform linux2 -- Python 2.7.8, pytest-3.1.3, py-1.4.34, pluggy-0.4.0
metadata: {'Python': '2.7.8', 'Platform': 'Linux-4.4.0-85-generic-x86_64-with-Ubuntu-14.04-trusty', 'Packages': {'py': '1.4.34', 'pytest': '3.1.3', 'pluggy': '0.4.0'}, 'JAVA_HOME': '/usr/lib/jvm/java-8-oracle', 'Plugins': {'xdist': '1.18.1', 'allure-pytest': '2.1.0b1', 'html': '1.14.2', 'PyTestTelegramReport': '0.0.7', 'instafail': '0.3.0', 'metadata': '1.5.0'}}
rootdir: /home/asdef/Dropbox/Work/Git/PyTestError, inifile:
plugins: xdist-1.18.1, metadata-1.5.0, instafail-0.3.0, html-1.14.2, allure-pytest-2.1.0b1, PyTestTelegramReport-0.0.7
collected 1 item s
test_3.py .
======================================================================= 1 passed in 0.70 seconds =======================================================================
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError$ cd /home/asdef/Dropbox/Work/Git/PyTestError/areport4
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError/areport4$ cd /home/asdef/Dropbox/Work/Git/PyTestError
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError$ pytest test_3.py --alluredir areport4
========================================================================= test session starts ==========================================================================
platform linux2 -- Python 2.7.8, pytest-3.1.3, py-1.4.34, pluggy-0.4.0
metadata: {'Python': '2.7.8', 'Platform': 'Linux-4.4.0-85-generic-x86_64-with-Ubuntu-14.04-trusty', 'Packages': {'py': '1.4.34', 'pytest': '3.1.3', 'pluggy': '0.4.0'}, 'JAVA_HOME': '/usr/lib/jvm/java-8-oracle', 'Plugins': {'xdist': '1.18.1', 'allure-pytest': '2.1.0b1', 'html': '1.14.2', 'PyTestTelegramReport': '0.0.7', 'instafail': '0.3.0', 'metadata': '1.5.0'}}
rootdir: /home/asdef/Dropbox/Work/Git/PyTestError, inifile:
plugins: xdist-1.18.1, metadata-1.5.0, instafail-0.3.0, html-1.14.2, allure-pytest-2.1.0b1, PyTestTelegramReport-0.0.7
collected 1 item s
test_3.py .
INTERNALERROR> Traceback (most recent call last):
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 105, in wrap_session
INTERNALERROR> session.exitstatus = doit(config, session) or 0
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 141, in _main
INTERNALERROR> config.hook.pytest_runtestloop(session=session)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 745, in __call__
INTERNALERROR> return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 339, in _hookexec
INTERNALERROR> return self._inner_hookexec(hook, methods, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 334, in <lambda>
INTERNALERROR> _MultiCall(methods, kwargs, hook.spec_opts).execute()
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 614, in execute
INTERNALERROR> res = hook_impl.function(*args)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 164, in pytest_runtestloop
INTERNALERROR> item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 745, in __call__
INTERNALERROR> return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 339, in _hookexec
INTERNALERROR> return self._inner_hookexec(hook, methods, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 334, in <lambda>
INTERNALERROR> _MultiCall(methods, kwargs, hook.spec_opts).execute()
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 613, in execute
INTERNALERROR> return _wrapped_call(hook_impl.function(*args), self.execute)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 250, in _wrapped_call
INTERNALERROR> wrap_controller.send(call_outcome)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_pytest/listener.py", line 78, in pytest_runtest_protocol
INTERNALERROR> self.allure_logger.close_test(uuid)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/logger.py", line 67, in close_test
INTERNALERROR> test_case.write(self._report_dir)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/model2.py", line 69, in write
INTERNALERROR> _write(report_dir, self, TEST_CASE_PATTERN)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/model2.py", line 23, in _write
INTERNALERROR> json_file.write(unicode(json.dumps(data, indent=indent, ensure_ascii=False, encoding='utf8')))
INTERNALERROR> File "/usr/lib/python2.7/json/__init__.py", line 250, in dumps
INTERNALERROR> sort_keys=sort_keys, **kw).encode(obj)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 207, in encode
INTERNALERROR> chunks = self.iterencode(o, _one_shot=True)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 270, in iterencode
INTERNALERROR> return _iterencode(o, 0)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 233, in _encoder
INTERNALERROR> o = o.decode(_encoding)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/lib/python2.7/encodings/utf_8.py", line 16, in decode
INTERNALERROR> return codecs.utf_8_decode(input, errors, True)
INTERNALERROR> UnicodeDecodeError: 'utf8' codec can't decode byte 0x89 in position 0: invalid start byte
======================================================================= 1 passed in 0.70 seconds =======================================================================
|
PyTestError
|
def func_parameters(func, *a, **kw):
bowels = (
inspect.getargspec(func)
if sys.version_info.major < 3
else inspect.getfullargspec(func)
)
args_dict = dict(zip(bowels.args, map(represent, a)))
kwargs_dict = dict(zip(kw, list(map(lambda i: represent(kw[i]), kw))))
kwarg_defaults = dict(
zip(
reversed(bowels.args), reversed(list(map(represent, bowels.defaults or ())))
)
)
kwarg_defaults.update(kwargs_dict)
return args_dict, kwarg_defaults
|
def func_parameters(func, *a, **kw):
if sys.version_info.major < 3:
all_names = inspect.getargspec(func).args
defaults = inspect.getargspec(func).defaults
else:
all_names = inspect.getfullargspec(func).args
defaults = inspect.getfullargspec(func).defaults
args_part = [(n, str(v)) for n, v in zip(all_names, a)]
kwarg_part = [
(n, str(kw[n]) if n in kw else str(defaults[i]))
for i, n in enumerate(all_names[len(a) :])
]
return args_part + kwarg_part
|
https://github.com/allure-framework/allure-python/issues/100
|
$ pytest test_3.py --alluredir areport4
========================================================================= test session starts ==========================================================================
platform linux2 -- Python 2.7.8, pytest-3.1.3, py-1.4.34, pluggy-0.4.0
metadata: {'Python': '2.7.8', 'Platform': 'Linux-4.4.0-85-generic-x86_64-with-Ubuntu-14.04-trusty', 'Packages': {'py': '1.4.34', 'pytest': '3.1.3', 'pluggy': '0.4.0'}, 'JAVA_HOME': '/usr/lib/jvm/java-8-oracle', 'Plugins': {'xdist': '1.18.1', 'allure-pytest': '2.1.0b1', 'html': '1.14.2', 'PyTestTelegramReport': '0.0.7', 'instafail': '0.3.0', 'metadata': '1.5.0'}}
rootdir: /home/asdef/Dropbox/Work/Git/PyTestError, inifile:
plugins: xdist-1.18.1, metadata-1.5.0, instafail-0.3.0, html-1.14.2, allure-pytest-2.1.0b1, PyTestTelegramReport-0.0.7
collected 1 item s
test_3.py .
======================================================================= 1 passed in 0.70 seconds =======================================================================
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError$ cd /home/asdef/Dropbox/Work/Git/PyTestError/areport4
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError/areport4$ cd /home/asdef/Dropbox/Work/Git/PyTestError
(autotesting)asdef@asdef-pc:~/Dropbox/Work/Git/PyTestError$ pytest test_3.py --alluredir areport4
========================================================================= test session starts ==========================================================================
platform linux2 -- Python 2.7.8, pytest-3.1.3, py-1.4.34, pluggy-0.4.0
metadata: {'Python': '2.7.8', 'Platform': 'Linux-4.4.0-85-generic-x86_64-with-Ubuntu-14.04-trusty', 'Packages': {'py': '1.4.34', 'pytest': '3.1.3', 'pluggy': '0.4.0'}, 'JAVA_HOME': '/usr/lib/jvm/java-8-oracle', 'Plugins': {'xdist': '1.18.1', 'allure-pytest': '2.1.0b1', 'html': '1.14.2', 'PyTestTelegramReport': '0.0.7', 'instafail': '0.3.0', 'metadata': '1.5.0'}}
rootdir: /home/asdef/Dropbox/Work/Git/PyTestError, inifile:
plugins: xdist-1.18.1, metadata-1.5.0, instafail-0.3.0, html-1.14.2, allure-pytest-2.1.0b1, PyTestTelegramReport-0.0.7
collected 1 item s
test_3.py .
INTERNALERROR> Traceback (most recent call last):
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 105, in wrap_session
INTERNALERROR> session.exitstatus = doit(config, session) or 0
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 141, in _main
INTERNALERROR> config.hook.pytest_runtestloop(session=session)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 745, in __call__
INTERNALERROR> return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 339, in _hookexec
INTERNALERROR> return self._inner_hookexec(hook, methods, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 334, in <lambda>
INTERNALERROR> _MultiCall(methods, kwargs, hook.spec_opts).execute()
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 614, in execute
INTERNALERROR> res = hook_impl.function(*args)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/main.py", line 164, in pytest_runtestloop
INTERNALERROR> item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 745, in __call__
INTERNALERROR> return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 339, in _hookexec
INTERNALERROR> return self._inner_hookexec(hook, methods, kwargs)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 334, in <lambda>
INTERNALERROR> _MultiCall(methods, kwargs, hook.spec_opts).execute()
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 613, in execute
INTERNALERROR> return _wrapped_call(hook_impl.function(*args), self.execute)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py", line 250, in _wrapped_call
INTERNALERROR> wrap_controller.send(call_outcome)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_pytest/listener.py", line 78, in pytest_runtest_protocol
INTERNALERROR> self.allure_logger.close_test(uuid)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/logger.py", line 67, in close_test
INTERNALERROR> test_case.write(self._report_dir)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/model2.py", line 69, in write
INTERNALERROR> _write(report_dir, self, TEST_CASE_PATTERN)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/local/lib/python2.7/site-packages/allure_commons/model2.py", line 23, in _write
INTERNALERROR> json_file.write(unicode(json.dumps(data, indent=indent, ensure_ascii=False, encoding='utf8')))
INTERNALERROR> File "/usr/lib/python2.7/json/__init__.py", line 250, in dumps
INTERNALERROR> sort_keys=sort_keys, **kw).encode(obj)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 207, in encode
INTERNALERROR> chunks = self.iterencode(o, _one_shot=True)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 270, in iterencode
INTERNALERROR> return _iterencode(o, 0)
INTERNALERROR> File "/usr/lib/python2.7/json/encoder.py", line 233, in _encoder
INTERNALERROR> o = o.decode(_encoding)
INTERNALERROR> File "/home/asdef/.virtualenvs/autotesting/lib/python2.7/encodings/utf_8.py", line 16, in decode
INTERNALERROR> return codecs.utf_8_decode(input, errors, True)
INTERNALERROR> UnicodeDecodeError: 'utf8' codec can't decode byte 0x89 in position 0: invalid start byte
======================================================================= 1 passed in 0.70 seconds =======================================================================
|
PyTestError
|
def selected(self) -> Optional[Gtk.TreeIter]:
(model, tree_iter) = self.selection.get_selected()
return tree_iter
|
def selected(self) -> Gtk.TreeIter:
(model, tree_iter) = self.selection.get_selected()
assert tree_iter is not None
return tree_iter
|
https://github.com/blueman-project/blueman/issues/1420
|
Traceback (most recent call last):
File "/home/cschramm/src/blueman/blueman/gui/manager/ManagerMenu.py", line 249, in on_adapter_selected
self.blueman.List.set_adapter(adapter_path)
File "/home/cschramm/src/blueman/blueman/gui/DeviceList.py", line 245, in set_adapter
self.clear()
File "/home/cschramm/src/blueman/blueman/gui/DeviceList.py", line 354, in clear
self.device_remove_event(device)
File "/home/cschramm/src/blueman/blueman/gui/manager/ManagerDeviceList.py", line 233, in device_remove_event
super().device_remove_event(device)
File "/home/cschramm/src/blueman/blueman/gui/DeviceList.py", line 237, in device_remove_event
if self.compare(self.selected(), tree_iter):
File "/home/cschramm/src/blueman/blueman/gui/GenericList.py", line 64, in selected
assert tree_iter is not None
AssertionError
|
AssertionError
|
def compare(
self, iter_a: Optional[Gtk.TreeIter], iter_b: Optional[Gtk.TreeIter]
) -> bool:
if iter_a is not None and iter_b is not None:
model = self.get_model()
assert model is not None
return model.get_path(iter_a) == model.get_path(iter_b)
else:
return False
|
def compare(self, iter_a: Gtk.TreeIter, iter_b: Gtk.TreeIter) -> bool:
if iter_a is not None and iter_b is not None:
model = self.get_model()
assert model is not None
return model.get_path(iter_a) == model.get_path(iter_b)
else:
return False
|
https://github.com/blueman-project/blueman/issues/1420
|
Traceback (most recent call last):
File "/home/cschramm/src/blueman/blueman/gui/manager/ManagerMenu.py", line 249, in on_adapter_selected
self.blueman.List.set_adapter(adapter_path)
File "/home/cschramm/src/blueman/blueman/gui/DeviceList.py", line 245, in set_adapter
self.clear()
File "/home/cschramm/src/blueman/blueman/gui/DeviceList.py", line 354, in clear
self.device_remove_event(device)
File "/home/cschramm/src/blueman/blueman/gui/manager/ManagerDeviceList.py", line 233, in device_remove_event
super().device_remove_event(device)
File "/home/cschramm/src/blueman/blueman/gui/DeviceList.py", line 237, in device_remove_event
if self.compare(self.selected(), tree_iter):
File "/home/cschramm/src/blueman/blueman/gui/GenericList.py", line 64, in selected
assert tree_iter is not None
AssertionError
|
AssertionError
|
def _on_popup_menu(self, _widget: Gtk.Widget) -> bool:
if self.Blueman is None:
return False
if self.menu is None:
self.menu = ManagerDeviceMenu(self.Blueman)
window = self.get_window()
assert window is not None
selected = self.selected()
assert selected is not None
rect = self.get_cell_area(self.liststore.get_path(selected), self.get_column(1))
self.menu.popup_at_rect(window, rect, Gdk.Gravity.CENTER, Gdk.Gravity.NORTH)
return True
|
def _on_popup_menu(self, _widget: Gtk.Widget) -> bool:
if self.Blueman is None:
return False
if self.menu is None:
self.menu = ManagerDeviceMenu(self.Blueman)
window = self.get_window()
assert window is not None
rect = self.get_cell_area(
self.liststore.get_path(self.selected()), self.get_column(1)
)
self.menu.popup_at_rect(window, rect, Gdk.Gravity.CENTER, Gdk.Gravity.NORTH)
return True
|
https://github.com/blueman-project/blueman/issues/1420
|
Traceback (most recent call last):
File "/home/cschramm/src/blueman/blueman/gui/manager/ManagerMenu.py", line 249, in on_adapter_selected
self.blueman.List.set_adapter(adapter_path)
File "/home/cschramm/src/blueman/blueman/gui/DeviceList.py", line 245, in set_adapter
self.clear()
File "/home/cschramm/src/blueman/blueman/gui/DeviceList.py", line 354, in clear
self.device_remove_event(device)
File "/home/cschramm/src/blueman/blueman/gui/manager/ManagerDeviceList.py", line 233, in device_remove_event
super().device_remove_event(device)
File "/home/cschramm/src/blueman/blueman/gui/DeviceList.py", line 237, in device_remove_event
if self.compare(self.selected(), tree_iter):
File "/home/cschramm/src/blueman/blueman/gui/GenericList.py", line 64, in selected
assert tree_iter is not None
AssertionError
|
AssertionError
|
def on_selection_changed(self, _selection: Gtk.TreeSelection) -> None:
tree_iter = self.List.selected()
assert tree_iter
if self.List.get_cursor()[0]:
# GtkTreePath returns row when used as string
self.Config["services-last-item"] = int(str(self.List.get_cursor()[0]))
row = self.List.get(tree_iter, "id")
rowid = row["id"]
self.set_page(rowid)
|
def on_selection_changed(self, _selection: Gtk.TreeSelection) -> None:
tree_iter = self.List.selected()
if self.List.get_cursor()[0]:
# GtkTreePath returns row when used as string
self.Config["services-last-item"] = int(str(self.List.get_cursor()[0]))
row = self.List.get(tree_iter, "id")
rowid = row["id"]
self.set_page(rowid)
|
https://github.com/blueman-project/blueman/issues/1420
|
Traceback (most recent call last):
File "/home/cschramm/src/blueman/blueman/gui/manager/ManagerMenu.py", line 249, in on_adapter_selected
self.blueman.List.set_adapter(adapter_path)
File "/home/cschramm/src/blueman/blueman/gui/DeviceList.py", line 245, in set_adapter
self.clear()
File "/home/cschramm/src/blueman/blueman/gui/DeviceList.py", line 354, in clear
self.device_remove_event(device)
File "/home/cschramm/src/blueman/blueman/gui/manager/ManagerDeviceList.py", line 233, in device_remove_event
super().device_remove_event(device)
File "/home/cschramm/src/blueman/blueman/gui/DeviceList.py", line 237, in device_remove_event
if self.compare(self.selected(), tree_iter):
File "/home/cschramm/src/blueman/blueman/gui/GenericList.py", line 64, in selected
assert tree_iter is not None
AssertionError
|
AssertionError
|
def ask_passkey(
self, dialog_msg, notify_msg, is_numeric, notification, device_path, ok, err
):
def passkey_dialog_cb(dialog, response_id):
if response_id == Gtk.ResponseType.ACCEPT:
ret = pin_entry.get_text()
ok(int(ret) if is_numeric else ret)
else:
err(BluezErrorRejected("Rejected"))
dialog.destroy()
self.dialog = None
dev_str = self.get_device_string(device_path)
notify_message = _("Pairing request for %s") % dev_str
if self.dialog:
logging.info("Agent: Another dialog still active, cancelling")
err(BluezErrorCanceled("Canceled"))
self.dialog, pin_entry = self.build_passkey_dialog(dev_str, dialog_msg, is_numeric)
if not self.dialog:
logging.error("Agent: Failed to build dialog")
err(BluezErrorCanceled("Canceled"))
if notification:
Notification(
_("Bluetooth Authentication"), notify_message, icon_name="blueman"
).show()
self.dialog.connect("response", passkey_dialog_cb)
self.dialog.present()
|
def ask_passkey(
self, dialog_msg, notify_msg, is_numeric, notification, device_path, ok, err
):
def passkey_dialog_cb(dialog, response_id):
if response_id == Gtk.ResponseType.ACCEPT:
ret = pin_entry.get_text()
if is_numeric:
ret = GLib.Variant("(u)", int(ret))
ok(ret)
else:
err(BluezErrorRejected("Rejected"))
dialog.destroy()
self.dialog = None
dev_str = self.get_device_string(device_path)
notify_message = _("Pairing request for %s") % dev_str
if self.dialog:
logging.info("Agent: Another dialog still active, cancelling")
err(BluezErrorCanceled("Canceled"))
self.dialog, pin_entry = self.build_passkey_dialog(dev_str, dialog_msg, is_numeric)
if not self.dialog:
logging.error("Agent: Failed to build dialog")
err(BluezErrorCanceled("Canceled"))
if notification:
Notification(
_("Bluetooth Authentication"), notify_message, icon_name="blueman"
).show()
self.dialog.connect("response", passkey_dialog_cb)
self.dialog.present()
|
https://github.com/blueman-project/blueman/issues/1159
|
(blueman-tray:79548): Gdk-CRITICAL **: 16:36:17.940: gdk_window_thaw_toplevel_updates: assertion 'window->update_and_descendants_freeze_count > 0' failed
blueman-manager version 2.1.1 starting
Stale PID, overwriting
blueman-manager 16.36.23 WARNING DeviceList:193 monitor_power_levels: Failed to get power levels, probably a LE device.
blueman-manager 16.36.33 WARNING DeviceList:193 monitor_power_levels: Failed to get power levels, probably a LE device.
/usr/lib/python3.8/site-packages/gi/overrides/Gtk.py:1630: Warning: g_value_get_int: assertion 'G_VALUE_HOLDS_INT (value)' failed
return _Gtk_main(*args, **kwargs)
Traceback (most recent call last):
File "/usr/lib/python3.8/site-packages/gi/overrides/GLib.py", line 130, in _create
iter(value)
TypeError: 'int' object is not iterable
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/lib/python3.8/site-packages/blueman/main/applet/BluezAgent.py", line 172, in passkey_dialog_cb
ret = GLib.Variant('(u)', int(ret))
File "/usr/lib/python3.8/site-packages/gi/overrides/GLib.py", line 189, in __new__
v = creator._create(format_string, value)
File "/usr/lib/python3.8/site-packages/gi/overrides/GLib.py", line 132, in _create
raise TypeError("Could not create array, tuple or dictionary entry from non iterable value %s %s" %
TypeError: Could not create array, tuple or dictionary entry from non iterable value (u) 401704
blueman-manager 16.37.04 ERROR Manager:201 error_handler: Authentication Canceled
Traceback (most recent call last):
File "/usr/lib/python3.8/site-packages/blueman/bluez/Base.py", line 81, in callback
value = proxy.call_finish(result).unpack()
gi.repository.GLib.GError: g-io-error-quark: GDBus.Error:org.bluez.Error.AuthenticationCanceled: Authentication Canceled (36)
|
TypeError
|
def passkey_dialog_cb(dialog, response_id):
if response_id == Gtk.ResponseType.ACCEPT:
ret = pin_entry.get_text()
ok(int(ret) if is_numeric else ret)
else:
err(BluezErrorRejected("Rejected"))
dialog.destroy()
self.dialog = None
|
def passkey_dialog_cb(dialog, response_id):
if response_id == Gtk.ResponseType.ACCEPT:
ret = pin_entry.get_text()
if is_numeric:
ret = GLib.Variant("(u)", int(ret))
ok(ret)
else:
err(BluezErrorRejected("Rejected"))
dialog.destroy()
self.dialog = None
|
https://github.com/blueman-project/blueman/issues/1159
|
(blueman-tray:79548): Gdk-CRITICAL **: 16:36:17.940: gdk_window_thaw_toplevel_updates: assertion 'window->update_and_descendants_freeze_count > 0' failed
blueman-manager version 2.1.1 starting
Stale PID, overwriting
blueman-manager 16.36.23 WARNING DeviceList:193 monitor_power_levels: Failed to get power levels, probably a LE device.
blueman-manager 16.36.33 WARNING DeviceList:193 monitor_power_levels: Failed to get power levels, probably a LE device.
/usr/lib/python3.8/site-packages/gi/overrides/Gtk.py:1630: Warning: g_value_get_int: assertion 'G_VALUE_HOLDS_INT (value)' failed
return _Gtk_main(*args, **kwargs)
Traceback (most recent call last):
File "/usr/lib/python3.8/site-packages/gi/overrides/GLib.py", line 130, in _create
iter(value)
TypeError: 'int' object is not iterable
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/lib/python3.8/site-packages/blueman/main/applet/BluezAgent.py", line 172, in passkey_dialog_cb
ret = GLib.Variant('(u)', int(ret))
File "/usr/lib/python3.8/site-packages/gi/overrides/GLib.py", line 189, in __new__
v = creator._create(format_string, value)
File "/usr/lib/python3.8/site-packages/gi/overrides/GLib.py", line 132, in _create
raise TypeError("Could not create array, tuple or dictionary entry from non iterable value %s %s" %
TypeError: Could not create array, tuple or dictionary entry from non iterable value (u) 401704
blueman-manager 16.37.04 ERROR Manager:201 error_handler: Authentication Canceled
Traceback (most recent call last):
File "/usr/lib/python3.8/site-packages/blueman/bluez/Base.py", line 81, in callback
value = proxy.call_finish(result).unpack()
gi.repository.GLib.GError: g-io-error-quark: GDBus.Error:org.bluez.Error.AuthenticationCanceled: Authentication Canceled (36)
|
TypeError
|
def __init__(self, plugin_class, module_path, parent):
super().__init__()
self.__deps: Dict[str, str] = {}
self.__cfls: Dict[str, str] = {}
self.__plugins: Dict[str, Union[AppletPlugin, ManagerPlugin]] = {}
self.__classes: Dict[str, Type[Union[AppletPlugin, ManagerPlugin]]] = {}
self.__loaded: List[str] = []
self.parent = parent
self.module_path = module_path
self.plugin_class = plugin_class
|
def __init__(self, plugin_class, module_path, parent):
super().__init__()
self.__plugins: Dict[str, T] = {}
self.__classes: Dict[str, Type[T]] = {}
self.__deps: Dict[str, str] = {}
self.__cfls: Dict[str, str] = {}
self.__loaded: List[str] = []
self.parent = parent
self.module_path = module_path
self.plugin_class = plugin_class
|
https://github.com/blueman-project/blueman/issues/1130
|
$ git clone https://github.com/blueman-project/blueman.git
…
$ cd blueman/
$ ./autogen.sh
…
$ make
…
$ PYTHONPATH=$(pwd) python3.6 ./blueman/main/Applet.py
Traceback (most recent call last):
File "./blueman/main/Applet.py", line 3, in <module>
import blueman.bluez as bluez
File "/home/dakkar/src/blueman/blueman/bluez/__init__.py", line 6, in <module>
import blueman.bluez.errors as errors
AttributeError: module 'blueman' has no attribute 'bluez'
# BUT!
$ git checkout 2.1.1
…
$ PYTHONPATH=$(pwd) python3.6 ./blueman/main/Applet.py
$
|
AttributeError
|
def set_adapter(self, adapter=None):
self.clear()
if self.discovering:
self.stop_discovery()
self.emit("adapter-property-changed", self.Adapter, ("Discovering", False))
adapter = adapter_path_to_name(adapter)
logging.debug("Setting adapter to: %s " % adapter)
if adapter is not None:
try:
self.Adapter = self.manager.get_adapter(adapter)
self.__adapter_path = self.Adapter.get_object_path()
self.emit("adapter-changed", self.__adapter_path)
except bluez.errors.DBusNoSuchAdapterError:
logging.warning("Failed to set adapter, trying first available.")
self.set_adapter(None)
return
else:
adapters = self.manager.get_adapters()
if len(adapters) > 0:
self.Adapter = adapters[0]
self.__adapter_path = self.Adapter.get_object_path()
else:
self.Adapter = None
self.__adapter_path = None
self.emit("adapter-changed", self.__adapter_path)
|
def set_adapter(self, adapter=None):
self.clear()
if self.discovering:
self.stop_discovery()
self.emit("adapter-property-changed", self.Adapter, ("Discovering", False))
adapter = adapter_path_to_name(adapter)
logging.debug(adapter)
# The pattern may be incorrect (ie removed adapter), see #590
try:
self.Adapter = self.manager.get_adapter(adapter)
except bluez.errors.DBusNoSuchAdapterError:
logging.info("Adapter pattern not valid, trying default adapter.")
try:
self.Adapter = self.manager.get_adapter()
self.__adapter_path = self.Adapter.get_object_path()
except bluez.errors.DBusNoSuchAdapterError as e:
logging.exception(e)
self.Adapter = None
self.__adapter_path = None
finally:
self.emit("adapter-changed", None)
|
https://github.com/blueman-project/blueman/issues/1050
|
$ blueman-manager
blueman-manager version 2.1 starting
Traceback (most recent call last):
File "/usr/lib/python3.7/site-packages/blueman/gui/manager/ManagerDeviceList.py", line 212, in on_finished
super().device_remove_event(device, tree_iter)
TypeError: super(type, obj): obj must be an instance or subtype of type
Traceback (most recent call last):
File "/usr/lib/python3.7/site-packages/blueman/gui/manager/ManagerDeviceList.py", line 212, in on_finished
super().device_remove_event(device, tree_iter)
TypeError: super(type, obj): obj must be an instance or subtype of type
Traceback (most recent call last):
File "/usr/lib/python3.7/site-packages/blueman/gui/manager/ManagerDeviceList.py", line 212, in on_finished
super().device_remove_event(device, tree_iter)
TypeError: super(type, obj): obj must be an instance or subtype of type
Traceback (most recent call last):
File "/usr/lib/python3.7/site-packages/blueman/gui/manager/ManagerDeviceList.py", line 212, in on_finished
super().device_remove_event(device, tree_iter)
TypeError: super(type, obj): obj must be an instance or subtype of type
Traceback (most recent call last):
File "/usr/lib/python3.7/site-packages/blueman/gui/manager/ManagerDeviceList.py", line 212, in on_finished
super().device_remove_event(device, tree_iter)
TypeError: super(type, obj): obj must be an instance or subtype of type
|
TypeError
|
def on_item_activated(self, item):
logging.info("Connect %s %s" % (item["address"], item["uuid"]))
item["mitem"]["sensitive"] = False
self.parent.Plugins.Menu.on_menu_changed()
def reply(*args):
Notification(
_("Connected"),
_("Connected to %s") % item["mitem"]["text"],
icon_name=item["icon"],
).show()
item["mitem"]["sensitive"] = True
self.parent.Plugins.Menu.on_menu_changed()
def err(reason):
Notification(
_("Failed to connect"),
str(reason).split(": ")[-1],
icon_name="dialog-error",
).show()
item["mitem"]["sensitive"] = True
self.parent.Plugins.Menu.on_menu_changed()
self.parent.Plugins.DBusService.connect_service(
item["device"], item["uuid"], reply, err
)
|
def on_item_activated(self, item):
logging.info("Connect %s %s" % (item["address"], item["uuid"]))
item["mitem"]["sensitive"] = False
self.parent.Plugins.Menu.on_menu_changed()
def reply(*args):
Notification(
_("Connected"),
_("Connected to %s") % item["mitem"]["text"],
icon_name=item["icon"],
).show()
item["mitem"]["sensitive"] = True
self.parent.Plugins.Menu.on_menu_changed()
def err(reason):
Notification(
_("Failed to connect"),
str(reason).split(": ")[-1],
icon_name="dialog-error",
).show()
item["mitem"]["sensitive"] = True
self.parent.Plugins.Menu.on_menu_changed()
self.parent.DbusSvc.connect_service(item["device"], item["uuid"], reply, err)
|
https://github.com/blueman-project/blueman/issues/1059
|
Traceback (most recent call last):
File "/usr/lib/python3.7/site-packages/blueman/main/indicators/GtkStatusIcon.py", line 19, in <lambda>
gtk_item.connect('activate', lambda _, idx=index: activate(idx))
File "/usr/lib/python3.7/site-packages/blueman/main/indicators/GtkStatusIcon.py", line 21, in <lambda>
gtk_item.set_submenu(build_menu(item['submenu'], lambda subid, idx=index: activate(idx, subid)))
File "/usr/lib/python3.7/site-packages/blueman/main/Tray.py", line 37, in _activate_menu_item
return AppletService().ActivateMenuItem('(ai)', indexes)
File "/usr/lib/python3.7/site-packages/gi/overrides/Gio.py", line 354, in __call__
None)
gi.repository.GLib.Error: g-dbus-error-quark: GDBus.Error:org.freedesktop.DBus.Error.Failed: Traceback (most recent call last):
File "/usr/lib/python3.7/site-packages/blueman/main/DbusService.py", line 124, in _handle_method_call
ok(method(*args))
File "/usr/lib/python3.7/site-packages/blueman/plugins/applet/Menu.py", line 144, in _activate_menu_item
node.callback()
File "/usr/lib/python3.7/site-packages/blueman/plugins/applet/RecentConns.py", line 216, in <lambda>
mitem = {"icon_name": item["icon"], "callback": lambda itm=item: self.on_item_activated(itm)}
File "/usr/lib/python3.7/site-packages/blueman/plugins/applet/RecentConns.py", line 212, in on_item_activated
self.parent.DbusSvc.connect_service(item["device"], item["uuid"], reply, err)
AttributeError: 'DbusService' object has no attribute 'connect_service'
|
gi.repository.GLib.Error
|
def get(self, name):
prop = self._dbus_proxy.get_cached_property(name)
if prop is None and name in self.__fallback:
return self.__fallback[name]
elif prop is None:
# Fallback when cached property is not available
param = GLib.Variant("(ss)", (self._interface_name, name))
try:
prop = self._call("Get", param, True)
return prop[0]
except GLib.GError:
raise BluezDBusException("No such property '%s'" % name)
elif prop is not None:
return prop.unpack()
else:
raise BluezDBusException("No such property '%s'" % name)
|
def get(self, name):
prop = self._dbus_proxy.get_cached_property(name)
if prop is not None:
return prop.unpack()
elif not prop and name in self.__fallback:
return self.__fallback[name]
else:
raise BluezDBusException("No such property '%s'" % name)
|
https://github.com/blueman-project/blueman/issues/555
|
_________
_on_dbus_name_vanished (/usr/bin/blueman-applet:84)
org.bluez
_________
update_menuitems (/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py:116)
warning: Adapter is None
_________
on_unregistered (/usr/lib64/python3.5/site-packages/blueman/bluez/obex/AgentManager.py:28)
/org/bluez/obex/agent/blueman
_________
_on_object_added (/usr/lib64/python3.5/site-packages/blueman/bluez/Manager.py:45)
/org/bluez/hci0
_________
on_adapter_added (/usr/bin/blueman-applet:99)
Adapter added /org/bluez/hci0
Traceback (most recent call last):
File "/usr/bin/blueman-applet", line 107, in on_adapter_added
wait_for_adapter(adapter, on_activate)
File "/usr/lib64/python3.5/site-packages/blueman/Functions.py", line 129, in wait_for_adapter
if props["Address"] != "00:00:00:00:00:00":
TypeError: 'NoneType' object is not subscriptable
_________
_on_object_added (/usr/lib64/python3.5/site-packages/blueman/bluez/Manager.py:45)
/org/bluez/hci0/dev_00_1F_20_3A_89_89
_________
on_device_created (/usr/bin/blueman-applet:113)
Device created /org/bluez/hci0/dev_00_1F_20_3A_89_89
_________
_on_object_added (/usr/lib64/python3.5/site-packages/blueman/bluez/Manager.py:45)
/org/bluez/hci0/dev_6C_83_36_B9_C6_D6
_________
on_device_created (/usr/bin/blueman-applet:113)
Device created /org/bluez/hci0/dev_6C_83_36_B9_C6_D6
_________
_on_object_added (/usr/lib64/python3.5/site-packages/blueman/bluez/Manager.py:45)
/org/bluez/hci0/dev_00_07_61_48_7A_2C
_________
on_device_created (/usr/bin/blueman-applet:113)
Device created /org/bluez/hci0/dev_00_07_61_48_7A_2C
_________
_on_object_added (/usr/lib64/python3.5/site-packages/blueman/bluez/Manager.py:45)
/org/bluez/hci0/dev_40_F4_07_CB_E4_D7
_________
on_device_created (/usr/bin/blueman-applet:113)
Device created /org/bluez/hci0/dev_40_F4_07_CB_E4_D7
_________
_on_object_added (/usr/lib64/python3.5/site-packages/blueman/bluez/Manager.py:45)
/org/bluez/hci0/dev_30_75_12_04_F7_2F
_________
on_device_created (/usr/bin/blueman-applet:113)
Device created /org/bluez/hci0/dev_30_75_12_04_F7_2F
_________
_on_dbus_name_appeared (/usr/bin/blueman-applet:78)
org.bluez :1.336
_________
Run (/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py:208)
Function on_manager_state_changed on DiscvManager Failed
Traceback (most recent call last):
File "/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py", line 212, in Run
ret = getattr(inst, function)(*args, **kwargs)
File "/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py", line 57, in on_manager_state_changed
self.update_menuitems()
File "/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py", line 123, in update_menuitems
if (not props["Discoverable"] or props["DiscoverableTimeout"] > 0) and props["Powered"]:
TypeError: 'NoneType' object is not subscriptable
_________
set_nap (/usr/lib64/python3.5/site-packages/blueman/plugins/applet/Networking.py:72)
set nap True
_________
Run (/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py:208)
Function on_manager_state_changed on RecentConns Failed
Traceback (most recent call last):
File "/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py", line 212, in Run
ret = getattr(inst, function)(*args, **kwargs)
File "/usr/lib64/python3.5/site-packages/blueman/plugins/applet/RecentConns.py", line 172, in on_manager_state_changed
self.Adapters[str(adapter.get_object_path())] = str(p["Address"])
TypeError: 'NoneType' object is not subscriptable
_________
register_agent (/usr/lib64/python3.5/site-packages/blueman/main/applet/BluezAgent.py:35)
_________
on_adapter_property_changed (/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py:89)
prop UUIDs ['00001801-0000-1000-8000-00805f9b34fb', '0000110e-0000-1000-8000-00805f9b34fb', '00001106-0000-1000-8000-00805f9b34fb', '00001800-0000-1000-8000-00805f9b34fb', '00001105-0000-1000-8000-00805f9b34fb', '00001200-0000-1000-8000-00805f9b34fb', '0000110c-0000-1000-8000-00805f9b34fb', '00001104-0000-1000-8000-00805f9b34fb', '0000110a-0000-1000-8000-00805f9b34fb', '00001133-0000-1000-8000-00805f9b34fb', '0000112f-0000-1000-8000-00805f9b34fb', '00001132-0000-1000-8000-00805f9b34fb']
_________
Run (/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py:208)
Function on_adapter_property_changed on DiscvManager Failed
Traceback (most recent call last):
File "/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py", line 212, in Run
ret = getattr(inst, function)(*args, **kwargs)
File "/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py", line 114, in on_adapter_property_changed
self.update_menuitems()
File "/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py", line 123, in update_menuitems
if (not props["Discoverable"] or props["DiscoverableTimeout"] > 0) and props["Powered"]:
TypeError: 'NoneType' object is not subscriptable
_________
_on_properties_changed (/usr/lib64/python3.5/site-packages/blueman/bluez/PropertiesBase.py:38)
/org/bluez/hci0 UUIDs ['00001801-0000-1000-8000-00805f9b34fb', '0000110e-0000-1000-8000-00805f9b34fb', '00001106-0000-1000-8000-00805f9b34fb', '00001800-0000-1000-8000-00805f9b34fb', '00001105-0000-1000-8000-00805f9b34fb', '00001200-0000-1000-8000-00805f9b34fb', '0000110c-0000-1000-8000-00805f9b34fb', '00001104-0000-1000-8000-00805f9b34fb', '0000110a-0000-1000-8000-00805f9b34fb', '00001133-0000-1000-8000-00805f9b34fb', '0000112f-0000-1000-8000-00805f9b34fb', '00001132-0000-1000-8000-00805f9b34fb']
_________
on_adapter_property_changed (/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py:89)
prop UUIDs ['00001112-0000-1000-8000-00805f9b34fb', '00001801-0000-1000-8000-00805f9b34fb', '0000110e-0000-1000-8000-00805f9b34fb', '00001106-0000-1000-8000-00805f9b34fb', '00001800-0000-1000-8000-00805f9b34fb', '00001105-0000-1000-8000-00805f9b34fb', '00001200-0000-1000-8000-00805f9b34fb', '0000110c-0000-1000-8000-00805f9b34fb', '00001104-0000-1000-8000-00805f9b34fb', '0000110a-0000-1000-8000-00805f9b34fb', '0000110b-0000-1000-8000-00805f9b34fb', '00001133-0000-1000-8000-00805f9b34fb', '0000112f-0000-1000-8000-00805f9b34fb', '00001132-0000-1000-8000-00805f9b34fb']
_________
Run (/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py:208)
Function on_adapter_property_changed on DiscvManager Failed
Traceback (most recent call last):
File "/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py", line 212, in Run
ret = getattr(inst, function)(*args, **kwargs)
File "/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py", line 114, in on_adapter_property_changed
self.update_menuitems()
File "/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py", line 123, in update_menuitems
if (not props["Discoverable"] or props["DiscoverableTimeout"] > 0) and props["Powered"]:
KeyError: 'Discoverable'
|
TypeError
|
def get_properties(self):
param = GLib.Variant("(s)", (self._interface_name,))
res = self._call("GetAll", param, True)
prop_names = res[0].keys()
result = {}
for name in prop_names:
result[name] = self.get(name)
if result:
for k, v in self.__fallback.items():
if k in result:
continue
else:
result[k] = v
return result
|
def get_properties(self):
prop_names = self._dbus_proxy.get_cached_property_names()
result = {}
for name in prop_names:
result[name] = self._dbus_proxy.get_cached_property(name).unpack()
if result:
for k, v in self.__fallback.items():
if k in result:
continue
else:
result[k] = v
return result
|
https://github.com/blueman-project/blueman/issues/555
|
_________
_on_dbus_name_vanished (/usr/bin/blueman-applet:84)
org.bluez
_________
update_menuitems (/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py:116)
warning: Adapter is None
_________
on_unregistered (/usr/lib64/python3.5/site-packages/blueman/bluez/obex/AgentManager.py:28)
/org/bluez/obex/agent/blueman
_________
_on_object_added (/usr/lib64/python3.5/site-packages/blueman/bluez/Manager.py:45)
/org/bluez/hci0
_________
on_adapter_added (/usr/bin/blueman-applet:99)
Adapter added /org/bluez/hci0
Traceback (most recent call last):
File "/usr/bin/blueman-applet", line 107, in on_adapter_added
wait_for_adapter(adapter, on_activate)
File "/usr/lib64/python3.5/site-packages/blueman/Functions.py", line 129, in wait_for_adapter
if props["Address"] != "00:00:00:00:00:00":
TypeError: 'NoneType' object is not subscriptable
_________
_on_object_added (/usr/lib64/python3.5/site-packages/blueman/bluez/Manager.py:45)
/org/bluez/hci0/dev_00_1F_20_3A_89_89
_________
on_device_created (/usr/bin/blueman-applet:113)
Device created /org/bluez/hci0/dev_00_1F_20_3A_89_89
_________
_on_object_added (/usr/lib64/python3.5/site-packages/blueman/bluez/Manager.py:45)
/org/bluez/hci0/dev_6C_83_36_B9_C6_D6
_________
on_device_created (/usr/bin/blueman-applet:113)
Device created /org/bluez/hci0/dev_6C_83_36_B9_C6_D6
_________
_on_object_added (/usr/lib64/python3.5/site-packages/blueman/bluez/Manager.py:45)
/org/bluez/hci0/dev_00_07_61_48_7A_2C
_________
on_device_created (/usr/bin/blueman-applet:113)
Device created /org/bluez/hci0/dev_00_07_61_48_7A_2C
_________
_on_object_added (/usr/lib64/python3.5/site-packages/blueman/bluez/Manager.py:45)
/org/bluez/hci0/dev_40_F4_07_CB_E4_D7
_________
on_device_created (/usr/bin/blueman-applet:113)
Device created /org/bluez/hci0/dev_40_F4_07_CB_E4_D7
_________
_on_object_added (/usr/lib64/python3.5/site-packages/blueman/bluez/Manager.py:45)
/org/bluez/hci0/dev_30_75_12_04_F7_2F
_________
on_device_created (/usr/bin/blueman-applet:113)
Device created /org/bluez/hci0/dev_30_75_12_04_F7_2F
_________
_on_dbus_name_appeared (/usr/bin/blueman-applet:78)
org.bluez :1.336
_________
Run (/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py:208)
Function on_manager_state_changed on DiscvManager Failed
Traceback (most recent call last):
File "/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py", line 212, in Run
ret = getattr(inst, function)(*args, **kwargs)
File "/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py", line 57, in on_manager_state_changed
self.update_menuitems()
File "/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py", line 123, in update_menuitems
if (not props["Discoverable"] or props["DiscoverableTimeout"] > 0) and props["Powered"]:
TypeError: 'NoneType' object is not subscriptable
_________
set_nap (/usr/lib64/python3.5/site-packages/blueman/plugins/applet/Networking.py:72)
set nap True
_________
Run (/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py:208)
Function on_manager_state_changed on RecentConns Failed
Traceback (most recent call last):
File "/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py", line 212, in Run
ret = getattr(inst, function)(*args, **kwargs)
File "/usr/lib64/python3.5/site-packages/blueman/plugins/applet/RecentConns.py", line 172, in on_manager_state_changed
self.Adapters[str(adapter.get_object_path())] = str(p["Address"])
TypeError: 'NoneType' object is not subscriptable
_________
register_agent (/usr/lib64/python3.5/site-packages/blueman/main/applet/BluezAgent.py:35)
_________
on_adapter_property_changed (/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py:89)
prop UUIDs ['00001801-0000-1000-8000-00805f9b34fb', '0000110e-0000-1000-8000-00805f9b34fb', '00001106-0000-1000-8000-00805f9b34fb', '00001800-0000-1000-8000-00805f9b34fb', '00001105-0000-1000-8000-00805f9b34fb', '00001200-0000-1000-8000-00805f9b34fb', '0000110c-0000-1000-8000-00805f9b34fb', '00001104-0000-1000-8000-00805f9b34fb', '0000110a-0000-1000-8000-00805f9b34fb', '00001133-0000-1000-8000-00805f9b34fb', '0000112f-0000-1000-8000-00805f9b34fb', '00001132-0000-1000-8000-00805f9b34fb']
_________
Run (/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py:208)
Function on_adapter_property_changed on DiscvManager Failed
Traceback (most recent call last):
File "/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py", line 212, in Run
ret = getattr(inst, function)(*args, **kwargs)
File "/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py", line 114, in on_adapter_property_changed
self.update_menuitems()
File "/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py", line 123, in update_menuitems
if (not props["Discoverable"] or props["DiscoverableTimeout"] > 0) and props["Powered"]:
TypeError: 'NoneType' object is not subscriptable
_________
_on_properties_changed (/usr/lib64/python3.5/site-packages/blueman/bluez/PropertiesBase.py:38)
/org/bluez/hci0 UUIDs ['00001801-0000-1000-8000-00805f9b34fb', '0000110e-0000-1000-8000-00805f9b34fb', '00001106-0000-1000-8000-00805f9b34fb', '00001800-0000-1000-8000-00805f9b34fb', '00001105-0000-1000-8000-00805f9b34fb', '00001200-0000-1000-8000-00805f9b34fb', '0000110c-0000-1000-8000-00805f9b34fb', '00001104-0000-1000-8000-00805f9b34fb', '0000110a-0000-1000-8000-00805f9b34fb', '00001133-0000-1000-8000-00805f9b34fb', '0000112f-0000-1000-8000-00805f9b34fb', '00001132-0000-1000-8000-00805f9b34fb']
_________
on_adapter_property_changed (/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py:89)
prop UUIDs ['00001112-0000-1000-8000-00805f9b34fb', '00001801-0000-1000-8000-00805f9b34fb', '0000110e-0000-1000-8000-00805f9b34fb', '00001106-0000-1000-8000-00805f9b34fb', '00001800-0000-1000-8000-00805f9b34fb', '00001105-0000-1000-8000-00805f9b34fb', '00001200-0000-1000-8000-00805f9b34fb', '0000110c-0000-1000-8000-00805f9b34fb', '00001104-0000-1000-8000-00805f9b34fb', '0000110a-0000-1000-8000-00805f9b34fb', '0000110b-0000-1000-8000-00805f9b34fb', '00001133-0000-1000-8000-00805f9b34fb', '0000112f-0000-1000-8000-00805f9b34fb', '00001132-0000-1000-8000-00805f9b34fb']
_________
Run (/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py:208)
Function on_adapter_property_changed on DiscvManager Failed
Traceback (most recent call last):
File "/usr/lib64/python3.5/site-packages/blueman/main/PluginManager.py", line 212, in Run
ret = getattr(inst, function)(*args, **kwargs)
File "/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py", line 114, in on_adapter_property_changed
self.update_menuitems()
File "/usr/lib64/python3.5/site-packages/blueman/plugins/applet/DiscvManager.py", line 123, in update_menuitems
if (not props["Discoverable"] or props["DiscoverableTimeout"] > 0) and props["Powered"]:
KeyError: 'Discoverable'
|
TypeError
|
def __init__(self, applet):
self._applet = applet
self._config = Config("org.blueman.transfer")
self._agent_path = "/org/blueman/obex_agent"
self._agent = obex.Agent(self._agent_path)
self._agent.connect("release", self._on_release)
self._agent.connect("authorize", self._on_authorize)
self._agent.connect("cancel", self._on_cancel)
self._allowed_devices = []
self._notification = None
self._pending_transfer = None
self.transfers = {}
|
def __init__(self, applet):
self._applet = applet
self._config = Config("org.blueman.transfer")
self._agent_path = "/org/blueman/obex_agent"
self._agent = obex.Agent(self._agent_path)
self._agent.connect("release", self._on_release)
self._agent.connect("authorize", self._on_authorize)
self._agent.connect("cancel", self._on_cancel)
self._allowed_devices = []
self._notification = None
self._pending_transfer = None
self.transfers = {}
obex.AgentManager().register_agent(self._agent_path)
|
https://github.com/blueman-project/blueman/issues/441
|
_________
Unload (/usr/lib64/python3.4/site-packages/blueman/main/PluginManager.py:183)
Unloading TransferService
_________
__load_plugin (/usr/lib64/python3.4/site-packages/blueman/main/PluginManager.py:133)
loading <class 'blueman.plugins.applet.TransferService.TransferService'>
_________
_on_obex_owner_changed (/usr/lib64/python3.4/site-packages/blueman/plugins/applet/TransferService.py:164)
obex owner changed: :1.40
Traceback (most recent call last):
File "/usr/lib64/python3.4/site-packages/dbus/connection.py", line 604, in msg_reply_handler
reply_handler(*message.get_args_list(**get_args_opts))
File "/usr/lib64/python3.4/site-packages/blueman/plugins/applet/TransferService.py", line 169, in _on_obex_owner_changed
self._agent = _Agent(self._applet)
File "/usr/lib64/python3.4/site-packages/blueman/plugins/applet/TransferService.py", line 29, in __init__
self._agent = obex.Agent(self._agent_path)
File "/usr/lib64/python3.4/site-packages/blueman/bluez/obex/Agent.py", line 31, in __init__
dbus.service.Object.__init__(self, dbus.SessionBus(), agent_path)
File "/usr/lib64/python3.4/site-packages/dbus/service.py", line 485, in __init__
self.add_to_connection(conn, object_path)
File "/usr/lib64/python3.4/site-packages/dbus/service.py", line 576, in add_to_connection
self._fallback)
KeyError: "Can't register the object-path handler for '/org/blueman/obex_agent': there is already a handler"
|
KeyError
|
def on_unload(self):
if self._watch:
self._watch.cancel()
self._unregister_agent()
|
def on_unload(self):
if self._watch:
self._watch.cancel()
self._agent = None
|
https://github.com/blueman-project/blueman/issues/441
|
_________
Unload (/usr/lib64/python3.4/site-packages/blueman/main/PluginManager.py:183)
Unloading TransferService
_________
__load_plugin (/usr/lib64/python3.4/site-packages/blueman/main/PluginManager.py:133)
loading <class 'blueman.plugins.applet.TransferService.TransferService'>
_________
_on_obex_owner_changed (/usr/lib64/python3.4/site-packages/blueman/plugins/applet/TransferService.py:164)
obex owner changed: :1.40
Traceback (most recent call last):
File "/usr/lib64/python3.4/site-packages/dbus/connection.py", line 604, in msg_reply_handler
reply_handler(*message.get_args_list(**get_args_opts))
File "/usr/lib64/python3.4/site-packages/blueman/plugins/applet/TransferService.py", line 169, in _on_obex_owner_changed
self._agent = _Agent(self._applet)
File "/usr/lib64/python3.4/site-packages/blueman/plugins/applet/TransferService.py", line 29, in __init__
self._agent = obex.Agent(self._agent_path)
File "/usr/lib64/python3.4/site-packages/blueman/bluez/obex/Agent.py", line 31, in __init__
dbus.service.Object.__init__(self, dbus.SessionBus(), agent_path)
File "/usr/lib64/python3.4/site-packages/dbus/service.py", line 485, in __init__
self.add_to_connection(conn, object_path)
File "/usr/lib64/python3.4/site-packages/dbus/service.py", line 576, in add_to_connection
self._fallback)
KeyError: "Can't register the object-path handler for '/org/blueman/obex_agent': there is already a handler"
|
KeyError
|
def on_manager_state_changed(self, state):
if not state:
self._unregister_agent()
|
def on_manager_state_changed(self, state):
if not state:
self._agent = None
|
https://github.com/blueman-project/blueman/issues/441
|
_________
Unload (/usr/lib64/python3.4/site-packages/blueman/main/PluginManager.py:183)
Unloading TransferService
_________
__load_plugin (/usr/lib64/python3.4/site-packages/blueman/main/PluginManager.py:133)
loading <class 'blueman.plugins.applet.TransferService.TransferService'>
_________
_on_obex_owner_changed (/usr/lib64/python3.4/site-packages/blueman/plugins/applet/TransferService.py:164)
obex owner changed: :1.40
Traceback (most recent call last):
File "/usr/lib64/python3.4/site-packages/dbus/connection.py", line 604, in msg_reply_handler
reply_handler(*message.get_args_list(**get_args_opts))
File "/usr/lib64/python3.4/site-packages/blueman/plugins/applet/TransferService.py", line 169, in _on_obex_owner_changed
self._agent = _Agent(self._applet)
File "/usr/lib64/python3.4/site-packages/blueman/plugins/applet/TransferService.py", line 29, in __init__
self._agent = obex.Agent(self._agent_path)
File "/usr/lib64/python3.4/site-packages/blueman/bluez/obex/Agent.py", line 31, in __init__
dbus.service.Object.__init__(self, dbus.SessionBus(), agent_path)
File "/usr/lib64/python3.4/site-packages/dbus/service.py", line 485, in __init__
self.add_to_connection(conn, object_path)
File "/usr/lib64/python3.4/site-packages/dbus/service.py", line 576, in add_to_connection
self._fallback)
KeyError: "Can't register the object-path handler for '/org/blueman/obex_agent': there is already a handler"
|
KeyError
|
def _on_obex_owner_changed(self, owner):
dprint("obex owner changed:", owner)
if owner == "":
self._unregister_agent()
else:
self._register_agent()
|
def _on_obex_owner_changed(self, owner):
dprint("obex owner changed:", owner)
if owner == "":
self._agent = None
else:
self._agent = _Agent(self._applet)
|
https://github.com/blueman-project/blueman/issues/441
|
_________
Unload (/usr/lib64/python3.4/site-packages/blueman/main/PluginManager.py:183)
Unloading TransferService
_________
__load_plugin (/usr/lib64/python3.4/site-packages/blueman/main/PluginManager.py:133)
loading <class 'blueman.plugins.applet.TransferService.TransferService'>
_________
_on_obex_owner_changed (/usr/lib64/python3.4/site-packages/blueman/plugins/applet/TransferService.py:164)
obex owner changed: :1.40
Traceback (most recent call last):
File "/usr/lib64/python3.4/site-packages/dbus/connection.py", line 604, in msg_reply_handler
reply_handler(*message.get_args_list(**get_args_opts))
File "/usr/lib64/python3.4/site-packages/blueman/plugins/applet/TransferService.py", line 169, in _on_obex_owner_changed
self._agent = _Agent(self._applet)
File "/usr/lib64/python3.4/site-packages/blueman/plugins/applet/TransferService.py", line 29, in __init__
self._agent = obex.Agent(self._agent_path)
File "/usr/lib64/python3.4/site-packages/blueman/bluez/obex/Agent.py", line 31, in __init__
dbus.service.Object.__init__(self, dbus.SessionBus(), agent_path)
File "/usr/lib64/python3.4/site-packages/dbus/service.py", line 485, in __init__
self.add_to_connection(conn, object_path)
File "/usr/lib64/python3.4/site-packages/dbus/service.py", line 576, in add_to_connection
self._fallback)
KeyError: "Can't register the object-path handler for '/org/blueman/obex_agent': there is already a handler"
|
KeyError
|
def wait_for_adapter(bluez_adapter, callback, timeout=1000):
def on_prop_changed(adapter, key, value, _path):
if key == "Powered" and value:
GLib.source_remove(source)
adapter.disconnect_signal(sig)
callback()
def on_timeout():
bluez_adapter.disconnect_signal(sig)
GLib.source_remove(source)
dprint(
YELLOW("Warning:"),
"Bluez didn't provide 'Powered' property in a reasonable timeout\nAssuming adapter is ready",
)
callback()
props = bluez_adapter.get_properties()
if props["Address"] != "00:00:00:00:00:00":
callback()
return
source = GLib.timeout_add(timeout, on_timeout)
sig = bluez_adapter.connect_signal("property-changed", on_prop_changed)
|
def wait_for_adapter(bluez_adapter, callback, timeout=1000):
def on_prop_changed(adapter, key, value):
if key == "Powered" and value:
GLib.source_remove(source)
adapter.disconnect_signal(sig)
callback()
def on_timeout():
bluez_adapter.disconnect_signal(sig)
GLib.source_remove(source)
dprint(
YELLOW("Warning:"),
"Bluez didn't provide 'Powered' property in a reasonable timeout\nAssuming adapter is ready",
)
callback()
props = bluez_adapter.get_properties()
if props["Address"] != "00:00:00:00:00:00":
callback()
return
source = GLib.timeout_add(timeout, on_timeout)
sig = bluez_adapter.connect_signal("property-changed", on_prop_changed)
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def on_prop_changed(adapter, key, value, _path):
if key == "Powered" and value:
GLib.source_remove(source)
adapter.disconnect_signal(sig)
callback()
|
def on_prop_changed(adapter, key, value):
if key == "Powered" and value:
GLib.source_remove(source)
adapter.disconnect_signal(sig)
callback()
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def _handle_signal(
self, handler, signal, interface_name=None, object_path=None, path_keyword=None
):
args = (
handler,
signal,
interface_name or self.__interface_name,
self.__bus_name,
object_path or self.__obj_path,
)
self.__bus.add_signal_receiver(*args, path_keyword=path_keyword)
self.__signals.append(args)
|
def _handle_signal(self, handler, signal, interface_name=None, object_path=None):
args = (
handler,
signal,
interface_name or self.__interface_name,
self.__bus_name,
object_path or self.__obj_path,
)
self.__bus.add_signal_receiver(*args)
self.__signals.append(args)
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def __init__(self, interface, obj_path):
super(PropertiesBase, self).__init__(interface, obj_path)
self._handler_wrappers = {}
if obj_path:
self.__properties_interface = dbus.Interface(
self._dbus_proxy, "org.freedesktop.DBus.Properties"
)
self._handle_signal(
self._on_properties_changed,
"PropertiesChanged",
"org.freedesktop.DBus.Properties",
path_keyword="path",
)
|
def __init__(self, interface, obj_path):
super(PropertiesBase, self).__init__(interface, obj_path)
self._handler_wrappers = {}
if obj_path:
self.__properties_interface = dbus.Interface(
self._dbus_proxy, "org.freedesktop.DBus.Properties"
)
self._handle_signal(
self._on_properties_changed,
"PropertiesChanged",
"org.freedesktop.DBus.Properties",
)
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def _on_property_changed(self, key, value, path):
dprint(path, key, value)
self.emit("property-changed", key, value, path)
|
def _on_property_changed(self, key, value):
dprint(self.get_object_path(), key, value)
self.emit("property-changed", key, value)
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def _on_properties_changed(
self, interface_name, changed_properties, _invalidated_properties, path
):
if interface_name == self._interface_name:
for name, value in changed_properties.items():
self._on_property_changed(name, value, path)
|
def _on_properties_changed(
self, interface_name, changed_properties, _invalidated_properties
):
if interface_name == self._interface_name:
for name, value in changed_properties.items():
self._on_property_changed(name, value)
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def _on_property_changed(self, _adapter, key, value, _path):
if key == "Discovering":
if not value and self.discovering:
self.StopDiscovery()
self.emit("adapter-property-changed", self.Adapter, (key, value))
|
def _on_property_changed(self, _adapter, key, value):
if key == "Discovering":
if not value and self.discovering:
self.StopDiscovery()
self.emit("adapter-property-changed", self.Adapter, (key, value))
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def _on_device_property_changed(self, _device, key, value, path):
iter = self.find_device_by_path(path)
if iter != None:
dev = self.get(iter, "device")["device"]
self.row_update_event(iter, key, value)
self.emit("device-property-changed", dev, iter, (key, value))
if key == "Connected":
if value:
self.monitor_power_levels(dev)
else:
r = Gtk.TreeRowReference.new(
self.get_model(), self.props.model.get_path(iter)
)
self.level_setup_event(r, dev, None)
elif key == "Paired":
if value and dev.Temp:
dev.Temp = False
|
def _on_device_property_changed(self, device, key, value):
iter = self.find_device_by_path(device.get_object_path())
if iter != None:
dev = self.get(iter, "device")["device"]
self.row_update_event(iter, key, value)
self.emit("device-property-changed", dev, iter, (key, value))
if key == "Connected":
if value:
self.monitor_power_levels(dev)
else:
r = Gtk.TreeRowReference.new(
self.get_model(), self.props.model.get_path(iter)
)
self.level_setup_event(r, dev, None)
elif key == "Paired":
if value and dev.Temp:
dev.Temp = False
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def _on_service_property_changed(self, _service, key, _value, _path):
if key == "Connected":
self.Generate()
|
def _on_service_property_changed(self, _service, key, value):
if key == "Connected":
self.Generate()
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def __init__(self, instance):
GObject.GObject.__init__(self)
self.Properties = {}
self.Temp = False
if hasattr(instance, "format") and hasattr(instance, "upper"):
self.Device = BluezDevice(instance)
else:
self.Device = instance
# set fallback icon, fixes lp:#327718
self.Device.Icon = "blueman"
self.Device.Class = "unknown"
self.Valid = True
dprint("caching initial properties")
self.Properties = self.Device.get_properties()
w = weakref.ref(self)
self._obj_path = self.Device.get_object_path()
self.Device.connect_signal(
"property-changed",
lambda _device, key, value, _path: w() and w().property_changed(key, value),
)
self._manager = Manager()
self._manager.connect_signal(
"device-removed", lambda _adapter, path: w() and w().on_device_removed(path)
)
|
def __init__(self, instance):
GObject.GObject.__init__(self)
self.Properties = {}
self.Temp = False
if hasattr(instance, "format") and hasattr(instance, "upper"):
self.Device = BluezDevice(instance)
else:
self.Device = instance
# set fallback icon, fixes lp:#327718
self.Device.Icon = "blueman"
self.Device.Class = "unknown"
self.Valid = True
dprint("caching initial properties")
self.Properties = self.Device.get_properties()
w = weakref.ref(self)
self._obj_path = self.Device.get_object_path()
self.Device.connect_signal(
"property-changed",
lambda _device, key, value: w() and w().property_changed(key, value),
)
self._manager = Manager()
self._manager.connect_signal(
"device-removed", lambda _adapter, path: w() and w().on_device_removed(path)
)
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def _on_network_prop_changed(self, _network, key, value, _path):
if key == "Interface":
if value != "":
self.dhcp_acquire(value)
|
def _on_network_prop_changed(self, _network, key, value):
if key == "Interface":
if value != "":
self.dhcp_acquire(value)
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def _on_device_property_changed(self, _device, key, value, path):
if key == "Connected":
klass = bluez.Device(path).get_properties()["Class"] & 0x1FFF
if klass == 0x504 or klass == 0x508:
if value:
self.xdg_screensaver("suspend")
else:
self.xdg_screensaver("resume")
|
def _on_device_property_changed(self, device, key, value):
if key == "Connected":
klass = device.get_properties()["Class"] & 0x1FFF
if klass == 0x504 or klass == 0x508:
if value:
self.xdg_screensaver("suspend")
else:
self.xdg_screensaver("resume")
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def _on_network_property_changed(self, _network, key, value, path):
if key == "Interface" and value != "":
d = BluezDevice(path)
d = Device(d)
self.monitor_interface(Monitor, d, value)
|
def _on_network_property_changed(self, network, key, value):
if key == "Interface" and value != "":
d = BluezDevice(network.get_object_path())
d = Device(d)
self.monitor_interface(Monitor, d, value)
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def _on_adapter_property_changed(self, _adapter, key, value, _path):
if key == "Powered":
if value and not self.CurrentState:
dprint("adapter powered on while in off state, turning bluetooth on")
self.RequestPowerState(True)
self.UpdatePowerState()
|
def _on_adapter_property_changed(self, _adapter, key, value):
if key == "Powered":
if value and not self.CurrentState:
dprint("adapter powered on while in off state, turning bluetooth on")
self.RequestPowerState(True)
self.UpdatePowerState()
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def _on_device_property_changed(self, _device, key, value, path):
if key == "Connected" and not value:
self.terminate_all_scripts(Device(path).Address)
|
def _on_device_property_changed(self, device, key, value):
if key == "Connected" and not value:
self.terminate_all_scripts(device.Address)
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def _on_device_property_changed(self, _device, key, value, _path):
if key == "Connected":
if value:
self.num_connections += 1
else:
self.num_connections -= 1
if (self.num_connections > 0 and not self.active) or (
self.num_connections == 0 and self.active
):
self.Applet.Plugins.StatusIcon.IconShouldChange()
self.update_statusicon()
|
def _on_device_property_changed(self, _device, key, value):
if key == "Connected":
if value:
self.num_connections += 1
else:
self.num_connections -= 1
if (self.num_connections > 0 and not self.active) or (
self.num_connections == 0 and self.active
):
self.Applet.Plugins.StatusIcon.IconShouldChange()
self.update_statusicon()
|
https://github.com/blueman-project/blueman/issues/341
|
_________
_on_property_changed (/usr/lib64/python2.7/site-packages/blueman/bluez/PropertiesBase.py:28)
None Connected 0
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 59, in _on_device_property_changed
self.terminate_all_scripts(device.Address)
AttributeError: 'Device' object has no attribute 'Address'
|
AttributeError
|
def connected(self):
try:
return self._service.get_properties()["Connected"]
except DBusException as e:
dprint("Could not get properties of network service: %s" % e)
return False
|
def connected(self):
return self._service.get_properties()["Connected"]
|
https://github.com/blueman-project/blueman/issues/208
|
Run (/usr/local/lib/python2.7/site-packages/blueman/main/PluginManager.py:203)
Function on_request_menu_items on Services Failed
Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/blueman/main/PluginManager.py", line 207, in Run
ret = getattr(inst, function)(args, *kwargs)
File "/usr/local/lib/python2.7/site-packages/blueman/plugins/manager/Services.py", line 54, in on_request_menu_items
add_menu_item(manager_menu, service)
File "/usr/local/lib/python2.7/site-packages/blueman/plugins/manager/Services.py", line 29, in add_menu_item
if service.connected:
File "/usr/local/lib/python2.7/site-packages/blueman/services/meta/NetworkService.py", line 12, in connected
return self._service.get_properties()['Connected']
File "/usr/local/lib/python2.7/site-packages/blueman/bluez/errors.py", line 143, in warp
raise parse_dbus_error(exception)
DBusException: org.freedesktop.DBus.Error.InvalidArgs: No such interface 'org.bluez.Network1'
|
DBusException
|
def Generate(self):
self.clear()
appl = AppletService()
items = []
if not self.is_popup or self.props.visible:
selected = self.Blueman.List.selected()
if not selected:
return
device = self.Blueman.List.get(selected, "device")["device"]
else:
(x, y) = self.Blueman.List.get_pointer()
path = self.Blueman.List.get_path_at_pos(x, y)
if path != None:
device = self.Blueman.List.get(path[0], "device")["device"]
else:
return
if not device.Valid:
return
self.SelectedDevice = device
op = self.get_op(device)
if op != None:
item = create_menuitem(op, get_icon("network-transmit-recieve", 16))
item.props.sensitive = False
item.show()
self.append(item)
return
rets = self.Blueman.Plugins.Run("on_request_menu_items", self, device)
for ret in rets:
if ret:
for item, pos in ret:
items.append((pos, item))
if device.Fake:
item = create_menuitem(_("_Add Device"), get_icon("list-add", 16))
self.Signals.Handle(
"gobject", item, "activate", lambda x: self.Blueman.add_device(device)
)
item.show()
self.append(item)
item.props.tooltip_text = _("Add this device to known devices list")
item = create_menuitem(_("_Setup..."), get_icon("document-properties", 16))
self.append(item)
self.Signals.Handle(
"gobject", item, "activate", lambda x: self.Blueman.setup(device)
)
item.show()
item.props.tooltip_text = _("Run the setup assistant for this device")
item = create_menuitem(_("_Pair"), get_icon("dialog-password", 16))
self.Signals.Handle(
"gobject", item, "activate", lambda x: self.Blueman.bond(device)
)
self.append(item)
item.show()
item.props.tooltip_text = _("Pair with the device")
item = Gtk.SeparatorMenuItem()
item.show()
self.append(item)
send_item = create_menuitem(_("Send a _File..."), get_icon("edit-copy", 16))
self.Signals.Handle(
"gobject", send_item, "activate", lambda x: self.Blueman.send(device)
)
send_item.show()
self.append(send_item)
else:
dprint(device.Alias)
item = None
have_disconnectables = False
have_connectables = False
if True in map(lambda x: x[0] >= 100 and x[0] < 200, items):
have_disconnectables = True
if True in map(lambda x: x[0] < 100, items):
have_connectables = True
if True in map(lambda x: x[0] >= 200, items) and (
have_connectables or have_disconnectables
):
item = Gtk.SeparatorMenuItem()
item.show()
items.append((199, item))
if have_connectables:
item = Gtk.MenuItem()
label = Gtk.Label()
label.set_markup(_("<b>Connect To:</b>"))
label.props.xalign = 0.0
label.show()
item.add(label)
item.props.sensitive = False
item.show()
items.append((0, item))
if have_disconnectables:
item = Gtk.MenuItem()
label = Gtk.Label()
label.set_markup(_("<b>Disconnect:</b>"))
label.props.xalign = 0.0
label.show()
item.add(label)
item.props.sensitive = False
item.show()
items.append((99, item))
items.sort(key=itemgetter(0))
for priority, item in items:
self.append(item)
if items != []:
item = Gtk.SeparatorMenuItem()
item.show()
self.append(item)
del items
send_item = create_menuitem(_("Send a _File..."), get_icon("edit-copy", 16))
send_item.props.sensitive = False
self.append(send_item)
send_item.show()
browse_item = create_menuitem(
_("_Browse Device..."), get_icon("document-open", 16)
)
browse_item.props.sensitive = False
self.append(browse_item)
browse_item.show()
uuids = device.UUIDs
for uuid in uuids:
uuid16 = uuid128_to_uuid16(uuid)
if uuid16 == OBEX_OBJPUSH_SVCLASS_ID:
self.Signals.Handle(
"gobject",
send_item,
"activate",
lambda x: self.Blueman.send(device),
)
send_item.props.sensitive = True
if uuid16 == OBEX_FILETRANS_SVCLASS_ID:
self.Signals.Handle(
"gobject",
browse_item,
"activate",
lambda x: self.Blueman.browse(device),
)
browse_item.props.sensitive = True
item = Gtk.SeparatorMenuItem()
item.show()
self.append(item)
item = create_menuitem(_("_Pair"), get_icon("dialog-password", 16))
item.props.tooltip_text = _("Create pairing with the device")
self.append(item)
item.show()
if not device.Paired:
self.Signals.Handle(
"gobject", item, "activate", lambda x: self.Blueman.bond(device)
)
else:
item.props.sensitive = False
if not device.Trusted:
item = create_menuitem(_("_Trust"), get_icon("blueman-trust", 16))
self.Signals.Handle(
"gobject", item, "activate", lambda x: self.Blueman.toggle_trust(device)
)
self.append(item)
item.show()
else:
item = create_menuitem(_("_Untrust"), get_icon("blueman-untrust", 16))
self.append(item)
self.Signals.Handle(
"gobject", item, "activate", lambda x: self.Blueman.toggle_trust(device)
)
item.show()
item.props.tooltip_text = _("Mark/Unmark this device as trusted")
item = create_menuitem(_("_Setup..."), get_icon("document-properties", 16))
self.append(item)
self.Signals.Handle(
"gobject", item, "activate", lambda x: self.Blueman.setup(device)
)
item.show()
item.props.tooltip_text = _("Run the setup assistant for this device")
def on_rename(_item, device):
def on_response(dialog, response_id):
if response_id == Gtk.ResponseType.ACCEPT:
device.set("Alias", alias_entry.get_text())
dialog.destroy()
builder = Gtk.Builder()
builder.set_translation_domain("blueman")
builder.add_from_file(UI_PATH + "/rename-device.ui")
dialog = builder.get_object("dialog")
dialog.set_transient_for(self.Blueman.window)
dialog.props.icon_name = "blueman"
alias_entry = builder.get_object("alias_entry")
alias_entry.set_text(device.Alias)
dialog.connect("response", on_response)
dialog.present()
item = Gtk.MenuItem.new_with_label("Rename device...")
self.Signals.Handle(item, "activate", on_rename, device)
self.append(item)
item.show()
item = Gtk.SeparatorMenuItem()
item.show()
self.append(item)
item = create_menuitem(_("_Remove..."), get_icon("edit-delete", 16))
self.Signals.Handle(item, "activate", lambda x: self.Blueman.remove(device))
self.append(item)
item.show()
item.props.tooltip_text = _("Remove this device from the known devices list")
item = Gtk.SeparatorMenuItem()
item.show()
self.append(item)
item = create_menuitem(_("_Disconnect"), get_icon("network-offline", 16))
item.props.tooltip_text = _("Forcefully disconnect the device")
self.append(item)
item.show()
def on_disconnect(item):
def finished(*args):
self.unset_op(device)
self.set_op(device, _("Disconnecting..."))
self.Blueman.disconnect(
device, reply_handler=finished, error_handler=finished
)
if device.Connected:
self.Signals.Handle(item, "activate", on_disconnect)
else:
item.props.sensitive = False
|
def Generate(self):
self.clear()
appl = AppletService()
items = []
if not self.is_popup or self.props.visible:
selected = self.Blueman.List.selected()
if not selected:
return
device = self.Blueman.List.get(selected, "device")["device"]
else:
(x, y) = self.Blueman.List.get_pointer()
path = self.Blueman.List.get_path_at_pos(x, y)
if path != None:
device = self.Blueman.List.get(path[0], "device")["device"]
else:
return
if not device.Valid:
return
self.SelectedDevice = device
op = self.get_op(device)
if op != None:
item = create_menuitem(op, get_icon("network-transmit-recieve", 16))
item.props.sensitive = False
item.show()
self.append(item)
return
rets = self.Blueman.Plugins.Run("on_request_menu_items", self, device)
for ret in rets:
if ret:
for item, pos in ret:
items.append((pos, item))
if device.Fake:
item = create_menuitem(_("_Add Device"), get_icon("list-add", 16))
self.Signals.Handle(
"gobject", item, "activate", lambda x: self.Blueman.add_device(device)
)
item.show()
self.append(item)
item.props.tooltip_text = _("Add this device to known devices list")
item = create_menuitem(_("_Setup..."), get_icon("document-properties", 16))
self.append(item)
self.Signals.Handle(
"gobject", item, "activate", lambda x: self.Blueman.setup(device)
)
item.show()
item.props.tooltip_text = _("Run the setup assistant for this device")
item = create_menuitem(_("_Pair"), get_icon("dialog-password", 16))
self.Signals.Handle(
"gobject", item, "activate", lambda x: self.Blueman.bond(device)
)
self.append(item)
item.show()
item.props.tooltip_text = _("Pair with the device")
item = Gtk.SeparatorMenuItem()
item.show()
self.append(item)
send_item = create_menuitem(_("Send a _File..."), get_icon("edit-copy", 16))
self.Signals.Handle(
"gobject", send_item, "activate", lambda x: self.Blueman.send(device)
)
send_item.show()
self.append(send_item)
else:
dprint(device.Alias)
item = None
have_disconnectables = False
have_connectables = False
if True in map(lambda x: x[0] >= 100 and x[0] < 200, items):
have_disconnectables = True
if True in map(lambda x: x[0] < 100, items):
have_connectables = True
if True in map(lambda x: x[0] >= 200, items) and (
have_connectables or have_disconnectables
):
item = Gtk.SeparatorMenuItem()
item.show()
items.append((199, item))
if have_connectables:
item = Gtk.MenuItem()
label = Gtk.Label()
label.set_markup(_("<b>Connect To:</b>"))
label.props.xalign = 0.0
label.show()
item.add(label)
item.props.sensitive = False
item.show()
items.append((0, item))
if have_disconnectables:
item = Gtk.MenuItem()
label = Gtk.Label()
label.set_markup(_("<b>Disconnect:</b>"))
label.props.xalign = 0.0
label.show()
item.add(label)
item.props.sensitive = False
item.show()
items.append((99, item))
items.sort(key=itemgetter(0))
for priority, item in items:
self.append(item)
if items != []:
item = Gtk.SeparatorMenuItem()
item.show()
self.append(item)
del items
send_item = create_menuitem(_("Send a _File..."), get_icon("edit-copy", 16))
send_item.props.sensitive = False
self.append(send_item)
send_item.show()
browse_item = create_menuitem(
_("_Browse Device..."), get_icon("document-open", 16)
)
browse_item.props.sensitive = False
self.append(browse_item)
browse_item.show()
uuids = device.UUIDs
for uuid in uuids:
uuid16 = uuid128_to_uuid16(uuid)
if uuid16 == OBEX_OBJPUSH_SVCLASS_ID:
self.Signals.Handle(
"gobject",
send_item,
"activate",
lambda x: self.Blueman.send(device),
)
send_item.props.sensitive = True
if uuid16 == OBEX_FILETRANS_SVCLASS_ID:
self.Signals.Handle(
"gobject",
browse_item,
"activate",
lambda x: self.Blueman.browse(device),
)
browse_item.props.sensitive = True
item = Gtk.SeparatorMenuItem()
item.show()
self.append(item)
item = create_menuitem(_("_Pair"), get_icon("dialog-password", 16))
item.props.tooltip_text = _("Create pairing with the device")
self.append(item)
item.show()
if not device.Paired:
self.Signals.Handle(
"gobject", item, "activate", lambda x: self.Blueman.bond(device)
)
else:
item.props.sensitive = False
if not device.Trusted:
item = create_menuitem(_("_Trust"), get_icon("blueman-trust", 16))
self.Signals.Handle(
"gobject", item, "activate", lambda x: self.Blueman.toggle_trust(device)
)
self.append(item)
item.show()
else:
item = create_menuitem(_("_Untrust"), get_icon("blueman-untrust", 16))
self.append(item)
self.Signals.Handle(
"gobject", item, "activate", lambda x: self.Blueman.toggle_trust(device)
)
item.show()
item.props.tooltip_text = _("Mark/Unmark this device as trusted")
item = create_menuitem(_("_Setup..."), get_icon("document-properties", 16))
self.append(item)
self.Signals.Handle(
"gobject", item, "activate", lambda x: self.Blueman.setup(device)
)
item.show()
item.props.tooltip_text = _("Run the setup assistant for this device")
item = Gtk.SeparatorMenuItem()
item.show()
self.append(item)
item = create_menuitem(_("_Remove..."), get_icon("edit-delete", 16))
self.Signals.Handle(item, "activate", lambda x: self.Blueman.remove(device))
self.append(item)
item.show()
item.props.tooltip_text = _("Remove this device from the known devices list")
item = Gtk.SeparatorMenuItem()
item.show()
self.append(item)
item = create_menuitem(_("_Disconnect"), get_icon("network-offline", 16))
item.props.tooltip_text = _("Forcefully disconnect the device")
self.append(item)
item.show()
def on_disconnect(item):
def finished(*args):
self.unset_op(device)
self.set_op(device, _("Disconnecting..."))
self.Blueman.disconnect(
device, reply_handler=finished, error_handler=finished
)
if device.Connected:
self.Signals.Handle(item, "activate", on_disconnect)
else:
item.props.sensitive = False
|
https://github.com/blueman-project/blueman/issues/117
|
Nov 02 22:27:54 zamanjaro org.gtk.vfs.Daemon[10692]: ** (process:10958): WARNING **: send_done_cb: No such interface 'org.gtk.vfs.Enumerator' on object at path /org/gtk/vfs/client/enumerator/1 (g-dbus-error-quark, 19)
Nov 02 22:45:57 zamanjaro org.gnome.Terminal[10692]: ** (gnome-terminal-server:3182): WARNING **: Couldn't register with accessibility bus: Did not receive a reply. Possible causes include: the remote application did not send a reply, the message bus security policy blocked the reply, the reply timeout expired, or the network connection was broken.
Nov 02 23:37:27 zamanjaro org.openobex[10692]: /usr/bin/obex-data-server: error while loading shared libraries: libMagickWand-6.Q16HDRI.so.1: cannot open shared object file: No such file or directory
Nov 02 23:41:14 zamanjaro org.blueman.Applet[10692]: ** (blueman-applet:10525): WARNING **: Couldn't register with accessibility bus: Did not receive a reply. Possible causes include: the remote application did not send a reply, the message bus security policy blocked the reply, the reply timeout expired, or the network connection was broken.
Nov 02 23:41:14 zamanjaro org.blueman.Applet[10692]: ERROR:root:Could not find any typelib for AppIndicator3
Nov 02 23:41:14 zamanjaro org.openobex[10692]: /usr/bin/obex-data-server: error while loading shared libraries: libMagickWand-6.Q16HDRI.so.1: cannot open shared object file: No such file or directory
Nov 02 23:41:14 zamanjaro org.blueman.Applet[10692]: Exception AttributeError: "'NoneType' object has no attribute 'Signals'" in <bound method Device.__del__ of <Device object at 0x7fbb38f0abe0 (blueman+main+Device+Device at 0xcba340)>> ignored
Nov 02 23:41:14 zamanjaro org.blueman.Applet[10692]: Exception AttributeError: "'NoneType' object has no attribute 'Signals'" in <bound method Device.__del__ of <Device object at 0x7fbb38f0a960 (blueman+main+Device+Device at 0xcba2c0)>> ignored
Nov 02 23:41:14 zamanjaro org.blueman.Applet[10692]: Exception AttributeError: "'NoneType' object has no attribute 'Signals'" in <bound method Device.__del__ of <Device object at 0x7fbb38f0a960 (blueman+main+Device+Device at 0xcaf5a0)>> ignored
Nov 02 23:41:14 zamanjaro org.blueman.Applet[10692]: Exception AttributeError: "'NoneType' object has no attribute 'Signals'" in <bound method Device.__del__ of <Device object at 0x7fbb38f0ac80 (blueman+main+Device+Device at 0xcba600)>> ignored
Nov 02 23:41:21 zamanjaro org.blueman.Applet[10692]: ERROR:dbus.connection:Exception in handler for D-Bus signal:
Nov 02 23:41:21 zamanjaro org.blueman.Applet[10692]: Traceback (most recent call last):
Nov 02 23:41:21 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/dbus/connection.py", line 230, in maybe_handle_message
Nov 02 23:41:21 zamanjaro org.blueman.Applet[10692]: self._handler(*args, **kwargs)
Nov 02 23:41:21 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/bluez/PropertiesBlueZInterface.py", line 51, in wrapper
Nov 02 23:41:21 zamanjaro org.blueman.Applet[10692]: handler(name, value, **kwargs)
Nov 02 23:41:21 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 48, in on_device_property_changed
Nov 02 23:41:21 zamanjaro org.blueman.Applet[10692]: d = Device(path)
Nov 02 23:41:21 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/main/Device.py", line 39, in __init__
Nov 02 23:41:21 zamanjaro org.blueman.Applet[10692]: self.Properties = self.Device.get_properties()
Nov 02 23:41:21 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/bluez/errors.py", line 143, in warp
Nov 02 23:41:21 zamanjaro org.blueman.Applet[10692]: raise parse_dbus_error(exception)
Nov 02 23:41:21 zamanjaro org.blueman.Applet[10692]: DBusException: org.freedesktop.DBus.Error.UnknownObject: Method "GetAll" with signature "s" on interface "org.freedesktop.DBus.Properties" doesn't exist
Nov 02 23:41:24 zamanjaro org.blueman.Applet[10692]: ERROR:dbus.connection:Exception in handler for D-Bus signal:
Nov 02 23:41:24 zamanjaro org.blueman.Applet[10692]: Traceback (most recent call last):
Nov 02 23:41:24 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/dbus/connection.py", line 230, in maybe_handle_message
Nov 02 23:41:24 zamanjaro org.blueman.Applet[10692]: self._handler(*args, **kwargs)
Nov 02 23:41:24 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/bluez/PropertiesBlueZInterface.py", line 51, in wrapper
Nov 02 23:41:24 zamanjaro org.blueman.Applet[10692]: handler(name, value, **kwargs)
Nov 02 23:41:24 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 48, in on_device_property_changed
Nov 02 23:41:24 zamanjaro org.blueman.Applet[10692]: d = Device(path)
Nov 02 23:41:24 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/main/Device.py", line 39, in __init__
Nov 02 23:41:24 zamanjaro org.blueman.Applet[10692]: self.Properties = self.Device.get_properties()
Nov 02 23:41:24 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/bluez/errors.py", line 143, in warp
Nov 02 23:41:24 zamanjaro org.blueman.Applet[10692]: raise parse_dbus_error(exception)
Nov 02 23:41:24 zamanjaro org.blueman.Applet[10692]: DBusException: org.freedesktop.DBus.Error.UnknownObject: Method "GetAll" with signature "s" on interface "org.freedesktop.DBus.Properties" doesn't exist
Nov 02 23:41:32 zamanjaro org.blueman.Applet[10692]: ERROR:dbus.connection:Exception in handler for D-Bus signal:
Nov 02 23:41:32 zamanjaro org.blueman.Applet[10692]: Traceback (most recent call last):
Nov 02 23:41:32 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/dbus/connection.py", line 230, in maybe_handle_message
Nov 02 23:41:32 zamanjaro org.blueman.Applet[10692]: self._handler(*args, **kwargs)
Nov 02 23:41:32 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/bluez/PropertiesBlueZInterface.py", line 51, in wrapper
Nov 02 23:41:32 zamanjaro org.blueman.Applet[10692]: handler(name, value, **kwargs)
Nov 02 23:41:32 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 48, in on_device_property_changed
Nov 02 23:41:32 zamanjaro org.blueman.Applet[10692]: d = Device(path)
Nov 02 23:41:32 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/main/Device.py", line 39, in __init__
Nov 02 23:41:32 zamanjaro org.blueman.Applet[10692]: self.Properties = self.Device.get_properties()
Nov 02 23:41:32 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/bluez/errors.py", line 143, in warp
Nov 02 23:41:32 zamanjaro org.blueman.Applet[10692]: raise parse_dbus_error(exception)
Nov 02 23:41:32 zamanjaro org.blueman.Applet[10692]: DBusException: org.freedesktop.DBus.Error.UnknownObject: Method "GetAll" with signature "s" on interface "org.freedesktop.DBus.Properties" doesn't exist
Nov 02 23:41:36 zamanjaro org.blueman.Applet[10692]: ERROR:dbus.connection:Exception in handler for D-Bus signal:
Nov 02 23:41:36 zamanjaro org.blueman.Applet[10692]: Traceback (most recent call last):
Nov 02 23:41:36 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/dbus/connection.py", line 230, in maybe_handle_message
Nov 02 23:41:36 zamanjaro org.blueman.Applet[10692]: self._handler(*args, **kwargs)
Nov 02 23:41:36 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/bluez/PropertiesBlueZInterface.py", line 51, in wrapper
Nov 02 23:41:36 zamanjaro org.blueman.Applet[10692]: handler(name, value, **kwargs)
Nov 02 23:41:36 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/plugins/applet/SerialManager.py", line 48, in on_device_property_changed
Nov 02 23:41:36 zamanjaro org.blueman.Applet[10692]: d = Device(path)
Nov 02 23:41:36 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/main/Device.py", line 39, in __init__
Nov 02 23:41:36 zamanjaro org.blueman.Applet[10692]: self.Properties = self.Device.get_properties()
Nov 02 23:41:36 zamanjaro org.blueman.Applet[10692]: File "/usr/lib/python2.7/site-packages/blueman/bluez/errors.py", line 143, in warp
Nov 02 23:41:36 zamanjaro org.blueman.Applet[10692]: raise parse_dbus_error(exception)
Nov 02 23:41:36 zamanjaro org.blueman.Applet[10692]: DBusException: org.freedesktop.DBus.Error.UnknownObject: Method "GetAll" with signature "s" on interface "org.freedesktop.DBus.Properties" doesn't exist
Nov 02 23:41:41 zamanjaro org.openobex[10692]: /usr/bin/obex-data-server: error while loading shared libraries: libMagickWand-6.Q16HDRI.so.1: cannot open shared object file: No such file or directory
Nov 02 23:43:39 zamanjaro org.openobex[10692]: /usr/bin/obex-data-server: error while loading shared libraries: libMagickWand-6.Q16HDRI.so.1: cannot open shared object file: No such file or directory
Nov 02 23:53:52 zamanjaro org.openobex[10692]: /usr/bin/obex-data-server: error while loading shared libraries: libMagickWand-6.Q16HDRI.so.1: cannot open shared object file: No such file or directory
|
AttributeError
|
def DisplayPasskey(self, device, passkey, entered):
dprint("DisplayPasskey (%s, %d)" % (device, passkey))
notify_message = _("Pairing passkey for") + " %s: %s" % (
self.get_device_alias(device),
passkey,
)
self.n = Notification(
"Bluetooth",
notify_message,
0,
pixbuf=get_icon("blueman", 48),
status_icon=self.status_icon,
)
|
def DisplayPasskey(self, device, passkey, entered):
dprint("DisplayPasskey (%s, %d)" % (device, passkey))
notify_message = _("Pairing passkey for") + " %s: %s" % (
self.get_device_alias(device),
passkey,
)
Notification(
"Bluetooth",
notify_message,
0,
pixbuf=get_icon("blueman", 48),
status_icon=self.status_icon,
)
|
https://github.com/blueman-project/blueman/issues/120
|
aep-haswell bluetoothd[655]: Agent replied with an error: org.freedesktop.DBus.Error.UnknownMethod, Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/dbus/service.py", line 654, in _message_cb
(candidate_method, parent_method) = _method_lookup(self, method_name, interface_name)
File "/usr/lib64/python2.7/site-packages/dbus/service.py", line 246, in _method_lookup
raise UnknownMethodException('%s is not a valid method of interface %s' % (method_name, dbus_interface))
UnknownMethodException: org.freedesktop.DBus.Error.UnknownMethod: Unknown method: DisplayPinCode is not a valid method of interface org.bluez.Agent1
|
UnknownMethodException
|
def DisplayPinCode(self, device, pin_code):
dprint("DisplayPinCode (%s, %s)" % (device, pin_code))
notify_message = _("Pairing PIN code for") + " %s: %s" % (
self.get_device_alias(device),
pin_code,
)
self.n = Notification(
"Bluetooth",
notify_message,
0,
pixbuf=get_icon("blueman", 48),
status_icon=self.status_icon,
)
|
def DisplayPinCode(self, device, pin_code):
dprint("DisplayPinCode (%s, %s)" % (device, pin_code))
notify_message = _("Pairing PIN code for") + " %s: %s" % (
self.get_device_alias(device),
pin_code,
)
Notification(
"Bluetooth",
notify_message,
0,
pixbuf=get_icon("blueman", 48),
status_icon=self.status_icon,
)
|
https://github.com/blueman-project/blueman/issues/120
|
aep-haswell bluetoothd[655]: Agent replied with an error: org.freedesktop.DBus.Error.UnknownMethod, Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/dbus/service.py", line 654, in _message_cb
(candidate_method, parent_method) = _method_lookup(self, method_name, interface_name)
File "/usr/lib64/python2.7/site-packages/dbus/service.py", line 246, in _method_lookup
raise UnknownMethodException('%s is not a valid method of interface %s' % (method_name, dbus_interface))
UnknownMethodException: org.freedesktop.DBus.Error.UnknownMethod: Unknown method: DisplayPinCode is not a valid method of interface org.bluez.Agent1
|
UnknownMethodException
|
def on_adapter_property_changed(self, list, adapter, kv):
(key, value) = kv
if key == "Name" or key == "Alias":
self.generate_adapter_menu()
elif key == "Discovering":
if self.Search:
if value:
self.Search.props.sensitive = False
else:
self.Search.props.sensitive = True
|
def on_adapter_property_changed(self, list, adapter, kv):
(key, value) = kv
if key == "Name":
self.generate_adapter_menu()
elif key == "Discovering":
if self.Search:
if value:
self.Search.props.sensitive = False
else:
self.Search.props.sensitive = True
|
https://github.com/blueman-project/blueman/issues/104
|
Traceback (most recent call last):
File "/usr/bin/blueman-adapters", line 78, in on_dialog_response
adapter.SetProperty('Name', settings['name'])
AttributeError: 'Adapter' object has no attribute 'SetProperty'
|
AttributeError
|
def generate_adapter_menu(self):
menu = Gtk.Menu()
sep = Gtk.SeparatorMenuItem()
sep.show()
menu.append(sep)
settings = Gtk.ImageMenuItem.new_from_stock("gtk-preferences", None)
settings.connect("activate", lambda x: self.blueman.adapter_properties())
settings.show()
menu.append(settings)
group = []
for adapter in self.adapters:
item = Gtk.RadioMenuItem.new_with_label(group, adapter.get_name())
group = item.get_group()
item.connect("activate", self.on_adapter_selected, adapter.get_object_path())
if adapter.get_object_path() == self.blueman.List.Adapter.get_object_path():
item.props.active = True
item.show()
menu.prepend(item)
sep = Gtk.SeparatorMenuItem()
sep.show()
menu.prepend(sep)
item = create_menuitem(_("_Search"), get_icon("gtk-find", 16))
item.connect("activate", lambda x: self.blueman.inquiry())
item.show()
menu.prepend(item)
self.Search = item
m = self.item_adapter.get_submenu()
if m != None:
m.deactivate()
self.item_adapter.set_submenu(menu)
sep = Gtk.SeparatorMenuItem()
sep.show()
menu.append(sep)
item = Gtk.ImageMenuItem.new_from_stock("gtk-quit", None)
item.connect("activate", lambda x: Gtk.main_quit())
item.show()
menu.append(item)
|
def generate_adapter_menu(self):
menu = Gtk.Menu()
sep = Gtk.SeparatorMenuItem()
sep.show()
menu.append(sep)
settings = Gtk.ImageMenuItem.new_from_stock("gtk-preferences", None)
settings.connect("activate", lambda x: self.blueman.adapter_properties())
settings.show()
menu.append(settings)
group = []
for adapter in self.adapters:
props = adapter.get_properties()
item = Gtk.RadioMenuItem.new_with_label(group, props["Name"])
group = item.get_group()
item.connect("activate", self.on_adapter_selected, adapter.get_object_path())
if adapter.get_object_path() == self.blueman.List.Adapter.get_object_path():
item.props.active = True
item.show()
menu.prepend(item)
sep = Gtk.SeparatorMenuItem()
sep.show()
menu.prepend(sep)
item = create_menuitem(_("_Search"), get_icon("gtk-find", 16))
item.connect("activate", lambda x: self.blueman.inquiry())
item.show()
menu.prepend(item)
self.Search = item
m = self.item_adapter.get_submenu()
if m != None:
m.deactivate()
self.item_adapter.set_submenu(menu)
sep = Gtk.SeparatorMenuItem()
sep.show()
menu.append(sep)
item = Gtk.ImageMenuItem.new_from_stock("gtk-quit", None)
item.connect("activate", lambda x: Gtk.main_quit())
item.show()
menu.append(item)
|
https://github.com/blueman-project/blueman/issues/104
|
Traceback (most recent call last):
File "/usr/bin/blueman-adapters", line 78, in on_dialog_response
adapter.SetProperty('Name', settings['name'])
AttributeError: 'Adapter' object has no attribute 'SetProperty'
|
AttributeError
|
def mackinnoncrit(
num_unit_roots: int = 1,
regression: str = "c",
nobs: float = inf,
dist_type: str = "ADF-t",
) -> NDArray:
"""
Returns the critical values for cointegrating and the ADF test.
In 2010 MacKinnon updated the values of his 1994 paper with critical values
for the augmented Dickey-Fuller bootstrap. These new values are to be
preferred and are used here.
Parameters
----------
num_unit_roots : int
The number of series of I(1) series for which the null of
non-cointegration is being tested. For N > 12, the critical values
are linearly interpolated (not yet implemented). For the ADF test,
N = 1.
regression : {'c', 'tc', 'ctt', 'nc'}, optional
Following MacKinnon (1996), these stand for the type of regression run.
'c' for constant and no trend, 'tc' for constant with a linear trend,
'ctt' for constant with a linear and quadratic trend, and 'nc' for
no constant. The values for the no constant case are taken from the
1996 paper, as they were not updated for 2010 due to the unrealistic
assumptions that would underlie such a case.
nobs : {int, np.inf}, optional
This is the sample size. If the sample size is numpy.inf, then the
asymptotic critical values are returned.
dist_type : {'adf-t', 'adf-z', 'dfgls'}, optional
Type of test statistic
Returns
-------
crit_vals : ndarray
Three critical values corresponding to 1%, 5% and 10% cut-offs.
Notes
-----
Results for ADF t-stats from MacKinnon (1994,2010). Results for DFGLS and
ADF z-bootstrap use the same methodology as MacKinnon.
References
----------
MacKinnon, J.G. 1994 "Approximate Asymptotic Distribution Functions for
Unit-Root and Cointegration Tests." Journal of Business & Economics
Statistics, 12.2, 167-76.
MacKinnon, J.G. 2010. "Critical Values for Cointegration Tests."
Queen's University, Dept of Economics Working Papers 1227.
https://ideas.repec.org/p/qed/wpaper/1227.html
"""
dist_type = dist_type.lower()
valid_regression = ["c", "ct", "n", "ctt"]
if dist_type == "dfgls":
valid_regression = ["c", "ct"]
if regression not in valid_regression:
raise ValueError("regression keyword {0} not understood".format(regression))
if dist_type == "adf-t":
asymptotic_cv = tau_2010[regression][num_unit_roots - 1, :, 0]
poly_coef = tau_2010[regression][num_unit_roots - 1, :, :].T
elif dist_type == "adf-z":
poly_coef = array(adf_z_cv_approx[regression]).T
asymptotic_cv = array(adf_z_cv_approx[regression])[:, 0]
elif dist_type == "dfgls":
poly_coef = dfgls_cv_approx[regression].T
asymptotic_cv = dfgls_cv_approx[regression][:, 0]
else:
raise ValueError("Unknown test type {0}".format(dist_type))
if nobs is inf:
return asymptotic_cv
else:
# Flip so that highest power to lowest power
return polyval(poly_coef[::-1], 1.0 / nobs)
|
def mackinnoncrit(
num_unit_roots: int = 1,
regression: str = "c",
nobs: float = inf,
dist_type: str = "ADF-t",
) -> NDArray:
"""
Returns the critical values for cointegrating and the ADF test.
In 2010 MacKinnon updated the values of his 1994 paper with critical values
for the augmented Dickey-Fuller bootstrap. These new values are to be
preferred and are used here.
Parameters
----------
num_unit_roots : int
The number of series of I(1) series for which the null of
non-cointegration is being tested. For N > 12, the critical values
are linearly interpolated (not yet implemented). For the ADF test,
N = 1.
regression : {'c', 'tc', 'ctt', 'nc'}, optional
Following MacKinnon (1996), these stand for the type of regression run.
'c' for constant and no trend, 'tc' for constant with a linear trend,
'ctt' for constant with a linear and quadratic trend, and 'nc' for
no constant. The values for the no constant case are taken from the
1996 paper, as they were not updated for 2010 due to the unrealistic
assumptions that would underlie such a case.
nobs : {int, np.inf}, optional
This is the sample size. If the sample size is numpy.inf, then the
asymptotic critical values are returned.
dist_type : {'adf-t', 'adf-z', 'dfgls'}, optional
Type of test statistic
Returns
-------
crit_vals : ndarray
Three critical values corresponding to 1%, 5% and 10% cut-offs.
Notes
-----
Results for ADF t-stats from MacKinnon (1994,2010). Results for DFGLS and
ADF z-bootstrap use the same methodology as MacKinnon.
References
----------
MacKinnon, J.G. 1994 "Approximate Asymptotic Distribution Functions for
Unit-Root and Cointegration Tests." Journal of Business & Economics
Statistics, 12.2, 167-76.
MacKinnon, J.G. 2010. "Critical Values for Cointegration Tests."
Queen's University, Dept of Economics Working Papers 1227.
https://ideas.repec.org/p/qed/wpaper/1227.html
"""
dist_type = dist_type.lower()
valid_regression = ["c", "ct", "n", "ctt"]
if dist_type == "dfgls":
valid_regression = ["c", "ct"]
if regression not in valid_regression:
raise ValueError("regression keyword {0} not understood".format(regression))
if dist_type == "adf-t":
asymptotic_cv = tau_2010[regression][num_unit_roots - 1, :, 0]
poly_coef = tau_2010[regression][num_unit_roots - 1, :, :].T
elif dist_type == "adf-z":
poly_coef = adf_z_cv_approx[regression].T
asymptotic_cv = adf_z_cv_approx[regression][:, 0]
elif dist_type == "dfgls":
poly_coef = dfgls_cv_approx[regression].T
asymptotic_cv = dfgls_cv_approx[regression][:, 0]
else:
raise ValueError("Unknown test type {0}".format(dist_type))
if nobs is inf:
return asymptotic_cv
else:
# Flip so that highest power to lowest power
return polyval(poly_coef[::-1], 1.0 / nobs)
|
https://github.com/bashtage/arch/issues/385
|
y = PhillipsPerron(default, trend='n', test_type='rho')
y.__str__()
Traceback (most recent call last):
File "<ipython-input-56-8cc462d9ab10>", line 1, in <module>
y.__str__()
File "C:\Users\u609587\AppData\Local\Continuum\anaconda3\lib\site-packages\arch\unitroot\unitroot.py", line 484, in __str__
return self.summary().__str__()
File "C:\Users\u609587\AppData\Local\Continuum\anaconda3\lib\site-packages\arch\unitroot\unitroot.py", line 570, in summary
("Test Statistic", "{0:0.3f}".format(self.stat)),
File "C:\Users\u609587\AppData\Local\Continuum\anaconda3\lib\site-packages\arch\unitroot\unitroot.py", line 554, in stat
self._compute_if_needed()
File "C:\Users\u609587\AppData\Local\Continuum\anaconda3\lib\site-packages\arch\unitroot\unitroot.py", line 517, in _compute_if_needed
self._compute_statistic()
File "C:\Users\u609587\AppData\Local\Continuum\anaconda3\lib\site-packages\arch\unitroot\unitroot.py", line 1171, in _compute_statistic
self._pvalue = mackinnonp(self._stat, regression=trend, dist_type=dist_type)
File "C:\Users\u609587\AppData\Local\Continuum\anaconda3\lib\site-packages\arch\unitroot\unitroot.py", line 1815, in mackinnonp
maxstat = adf_z_max[regression]
KeyError: 'n'
|
KeyError
|
def _redirect(self, to_fd, unbuffered=False, close=False):
if close:
fp = getattr(sys, self._stream)
# TODO(jhr): does this still work under windows? are we leaking a fd?
# Do not close old filedescriptor as others might be using it
try:
fp.close()
except Exception:
pass # Stream might be wrapped by another program which doesn't support closing.
os.dup2(to_fd, self._old_fd)
if self._io_wrapped:
if close:
setattr(sys, self._stream, getattr(sys, self._stream).output_streams[0])
else:
setattr(
sys,
self._stream,
StreamFork(
[getattr(sys, self._stream), os.fdopen(self._old_fd, "w")],
unbuffered=unbuffered,
),
)
else:
setattr(sys, self._stream, os.fdopen(self._old_fd, "w"))
if unbuffered:
setattr(sys, self._stream, Unbuffered(getattr(sys, self._stream)))
|
def _redirect(self, to_fd, unbuffered=False, close=False):
if close:
fp = getattr(sys, self._stream)
# TODO(jhr): does this still work under windows? are we leaking a fd?
# Do not close old filedescriptor as others might be using it
fp.close()
os.dup2(to_fd, self._old_fd)
if self._io_wrapped:
if close:
setattr(sys, self._stream, getattr(sys, self._stream).output_streams[0])
else:
setattr(
sys,
self._stream,
StreamFork(
[getattr(sys, self._stream), os.fdopen(self._old_fd, "w")],
unbuffered=unbuffered,
),
)
else:
setattr(sys, self._stream, os.fdopen(self._old_fd, "w"))
if unbuffered:
setattr(sys, self._stream, Unbuffered(getattr(sys, self._stream)))
|
https://github.com/wandb/client/issues/1321
|
Error in atexit._run_exitfuncs:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/wandb/sdk/wandb_run.py",
line 1172, in _atexit_cleanup
self._on_finish()
File "/opt/conda/lib/python3.7/site-packages/wandb/sdk/wandb_run.py",
line 1292, in _on_finish
self._console_stop() # TODO: there's a race here with jupyter console
logging
File "/opt/conda/lib/python3.7/site-packages/wandb/sdk/wandb_run.py",
line 1203, in _console_stop
self._restore()
File "/opt/conda/lib/python3.7/site-packages/wandb/sdk/wandb_run.py",
line 1140, in _restore
self._out_redir.uninstall()
File "/opt/conda/lib/python3.7/site-packages/wandb/lib/redirect.py",
line 207, in uninstall
self._redirect(to_fd=self._old_fp.fileno(), close=True)
File "/opt/conda/lib/python3.7/site-packages/wandb/lib/redirect.py",
line 161, in _redirect
fp.close()
AttributeError: 'TeeHandler' object has no attribute 'close'
|
AttributeError
|
def notebook_metadata():
"""Attempts to query jupyter for the path and name of the notebook file"""
error_message = "Failed to query for notebook name, you can set it manually with the WANDB_NOTEBOOK_NAME environment variable"
try:
import ipykernel
from notebook.notebookapp import list_running_servers
kernel_id = re.search(
"kernel-(.*).json", ipykernel.connect.get_connection_file()
).group(1)
servers = list(
list_running_servers()
) # TODO: sometimes there are invalid JSON files and this blows up
except Exception:
logger.error(error_message)
return {}
for s in servers:
try:
if s["password"]:
raise ValueError("Can't query password protected kernel")
res = requests.get(
urljoin(s["url"], "api/sessions"), params={"token": s.get("token", "")}
).json()
except (requests.RequestException, ValueError):
logger.error(error_message)
return {}
for nn in res:
# TODO: wandb/client#400 found a case where res returned an array of strings...
if isinstance(nn, dict) and nn.get("kernel") and "notebook" in nn:
if nn["kernel"]["id"] == kernel_id:
return {
"root": s["notebook_dir"],
"path": nn["notebook"]["path"],
"name": nn["notebook"]["name"],
}
return {}
|
def notebook_metadata():
"""Attempts to query jupyter for the path and name of the notebook file"""
error_message = "Failed to query for notebook name, you can set it manually with the WANDB_NOTEBOOK_NAME environment variable"
try:
import ipykernel
from notebook.notebookapp import list_running_servers
kernel_id = re.search(
"kernel-(.*).json", ipykernel.connect.get_connection_file()
).group(1)
servers = list(
list_running_servers()
) # TODO: sometimes there are invalid JSON files and this blows up
except Exception:
logger.error(error_message)
return {}
for s in servers:
try:
if s["password"]:
raise ValueError("Can't query password protected kernel")
res = requests.get(
urljoin(s["url"], "api/sessions"), params={"token": s.get("token", "")}
).json()
except (requests.RequestException, ValueError):
logger.error(error_message)
return {}
for nn in res:
# TODO: wandb/client#400 found a case where res returned an array of strings...
if isinstance(nn, dict) and nn.get("kernel"):
if nn["kernel"]["id"] == kernel_id:
return {
"root": s["notebook_dir"],
"path": nn["notebook"]["path"],
"name": nn["notebook"]["name"],
}
return {}
|
https://github.com/wandb/client/issues/678
|
AttributeError Traceback (most recent call last)
C:\Python37\lib\site-packages\wandb\meta.py in setup(self)
46 import __main__
---> 47 self.data["program"] = __main__.__file__
48 except (ImportError, AttributeError):
AttributeError: module '__main__' has no attribute '__file__'
During handling of the above exception, another exception occurred:
KeyError Traceback (most recent call last)
in
1 import wandb
2 from wandb.keras import WandbCallback
----> 3 wandb.init(project="my_project_name")
C:\Python37\lib\site-packages\wandb\__init__.py in init(job_type, dir, config, project, entity, reinit, tags, group, allow_val_change, resume, force, tensorboard, sync_tensorboard, monitor_gym, name, notes, id, magic, anonymous)
1068 allow_val_change = True
1069 if config or telemetry_updated:
-> 1070 run.config._update(config, allow_val_change=allow_val_change, as_defaults=not allow_val_change)
1071
1072 # Access history to ensure resumed is set when resuming
C:\Python37\lib\site-packages\wandb\wandb_config.py in _update(self, params, allow_val_change, as_defaults)
276 continue
277 self._items[key] = val
--> 278 self.persist()
279
280 def update(self, params, allow_val_change=False):
C:\Python37\lib\site-packages\wandb\wandb_config.py in persist(self)
194 conf_file.write(str(self))
195 if wandb.run and wandb.run._jupyter_agent:
--> 196 wandb.run._jupyter_agent.start()
197
198 def get(self, *args):
C:\Python37\lib\site-packages\wandb\jupyter.py in start(self)
120 def start(self):
121 if self.paused:
--> 122 self.rm = RunManager(wandb.run, output=False, cloud=wandb.run.mode != "dryrun")
123 wandb.run.api._file_stream_api = None
124 self.rm.mirror_stdout_stderr()
C:\Python37\lib\site-packages\wandb\run_manager.py in __init__(self, run, project, tags, cloud, output, port)
504 # Calling .start() on _meta and _system_stats will spin a thread that reports system stats every 30 seconds
505 self._system_stats = stats.SystemStats(run, self._api)
--> 506 self._meta = meta.Meta(self._api, self._run.dir)
507 self._meta.data["jobType"] = self._run.job_type
508 self._meta.data["mode"] = self._run.mode
C:\Python37\lib\site-packages\wandb\meta.py in __init__(self, api, out_dir)
34 self.data = {}
35 self.lock = threading.Lock()
---> 36 self.setup()
37 self._thread = threading.Thread(target=self._thread_body)
38 self._thread.daemon = True
C:\Python37\lib\site-packages\wandb\meta.py in setup(self)
52 self.data["program"] = os.getenv(env.NOTEBOOK_NAME)
53 else:
---> 54 meta = wandb.jupyter.notebook_metadata()
55 if meta.get("path"):
56 if "fileId=" in meta["path"]:
C:\Python37\lib\site-packages\wandb\jupyter.py in notebook_metadata()
108 if isinstance(nn, dict) and nn.get("kernel"):
109 if nn['kernel']['id'] == kernel_id:
--> 110 return {"root": s['notebook_dir'], "path": nn['notebook']['path'], "name": nn['notebook']['name']}
111 return {}
112
KeyError: 'notebook'
|
AttributeError
|
def _set_win_sizes(self):
try:
win_size = fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ, "\0" * 8)
except OSError: # eg. in MPI we can't do this
rows, cols, xpix, ypix = 25, 80, 0, 0
else:
rows, cols, xpix, ypix = struct.unpack("HHHH", win_size)
if cols == 0:
cols = 80
win_size = struct.pack("HHHH", rows, cols, xpix, ypix)
for fd in self.fds:
try:
fcntl.ioctl(fd, termios.TIOCSWINSZ, win_size)
except OSError: # eg. in MPI we can't do this
pass
|
def _set_win_sizes(self):
try:
win_size = fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ, "\0" * 8)
except OSError: # eg. in MPI we can't do this
rows, cols, xpix, ypix = 25, 80, 0, 0
else:
rows, cols, xpix, ypix = array.array("h", win_size)
if cols == 0:
cols = 80
win_size = struct.pack("HHHH", rows, cols, xpix, ypix)
for fd in self.fds:
try:
fcntl.ioctl(fd, termios.TIOCSWINSZ, win_size)
except OSError: # eg. in MPI we can't do this
pass
|
https://github.com/wandb/client/issues/401
|
(main) ÷19-06-28 09:14:41|ttys010|trippings-mbp-3|~/git0/mindscribe-question-ai\÷ python
Python 3.6.8 |Anaconda, Inc.| (default, Dec 29 2018, 19:04:46)
[GCC 4.2.1 Compatible Clang 4.0.1 (tags/RELEASE_401/final)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
import wandb
wandb.init(project='moonscrub')
wandb: Started W&B process version 0.8.3 with PID 33316
wandb: Local directory: wandb/run-20190628_161454-0ti1q1zb
wandb: Syncing run easy-lion-3: https://app.wandb.ai/yaroslavvb/moonscrub/runs/0ti1q1zb
wandb: Run `wandb off` to turn off syncing.
Traceback (most recent call last):
File "/Users/yaroslavvb/anaconda3/envs/main/lib/python3.6/site-packages/wandb/internal_cli.py", line 105, in <module>
main()
File "/Users/yaroslavvb/anaconda3/envs/main/lib/python3.6/site-packages/wandb/internal_cli.py", line 97, in main
headless(args)
File "/Users/yaroslavvb/anaconda3/envs/main/lib/python3.6/site-packages/wandb/internal_cli.py", line 53, in headless
util.sentry_reraise(e)
File "/Users/yaroslavvb/anaconda3/envs/main/lib/python3.6/site-packages/wandb/util.py", line 88, in sentry_reraise
six.reraise(type(exc), exc, sys.exc_info()[2])
File "/Users/yaroslavvb/anaconda3/envs/main/lib/python3.6/site-packages/six.py", line 693, in reraise
raise value
File "/Users/yaroslavvb/anaconda3/envs/main/lib/python3.6/site-packages/wandb/internal_cli.py", line 51, in headless
user_process_pid, stdout_master_fd, stderr_master_fd)
File "/Users/yaroslavvb/anaconda3/envs/main/lib/python3.6/site-packages/wandb/run_manager.py", line 1053, in wrap_existing_process
io_wrap.SIGWINCH_HANDLER.add_fd(stdout_read_fd)
File "/Users/yaroslavvb/anaconda3/envs/main/lib/python3.6/site-packages/wandb/io_wrap.py", line 118, in add_fd
self._set_win_sizes()
File "/Users/yaroslavvb/anaconda3/envs/main/lib/python3.6/site-packages/wandb/io_wrap.py", line 138, in _set_win_sizes
win_size = struct.pack("HHHH", rows, cols, xpix, ypix)
struct.error: ushort format requires 0 <= number <= (32767 *2 +1)
|
struct.error
|
def get_jk(
self, dm, hermi=1, kpts=None, kpts_band=None, with_j=True, with_k=True, exxdiv=None
):
if kpts is None:
if numpy.all(self.kpts == 0):
# Gamma-point calculation by default
kpts = numpy.zeros(3)
else:
kpts = self.kpts
if kpts.shape == (3,):
return aft_jk.get_jk(self, dm, hermi, kpts, kpts_band, with_j, with_k, exxdiv)
vj = vk = None
if with_k:
vk = aft_jk.get_k_kpts(self, dm, hermi, kpts, kpts_band, exxdiv)
if with_j:
vj = aft_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
return vj, vk
|
def get_jk(
self,
dm,
hermi=1,
kpts=None,
kpts_band=None,
with_j=True,
with_k=True,
exxdiv="ewald",
):
if kpts is None:
if numpy.all(self.kpts == 0):
# Gamma-point calculation by default
kpts = numpy.zeros(3)
else:
kpts = self.kpts
if kpts.shape == (3,):
return aft_jk.get_jk(self, dm, hermi, kpts, kpts_band, with_j, with_k, exxdiv)
vj = vk = None
if with_k:
vk = aft_jk.get_k_kpts(self, dm, hermi, kpts, kpts_band, exxdiv)
if with_j:
vj = aft_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
return vj, vk
|
https://github.com/pyscf/pyscf/issues/253
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 1476, in kernel
return self.scf(dm0, **kwargs)
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 1470, in scf
conv_check=self.conv_check, **kwargs)
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 143, in kernel
vhf = mf.get_veff(mol, dm)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/scf/krohf.py", line 335, in get_veff
vj, vk = self.get_jk(cell, dm_kpts, hermi, kpts, kpts_band)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/scf/khf.py", line 493, in get_jk
exxdiv=self.exxdiv)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/df/fft.py", line 288, in get_jk
vj = fft_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/df/fft_jk.py", line 67, in get_j_kpts
rhoR[i,p0:p1] += make_rho(i, ao_ks, mask, 'LDA')
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/dft/numint.py", line 1222, in make_rho
hermi=1)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/dft/numint.py", line 1086, in eval_rho
rhoR += eval_rho(cell, ao_kpts[k], dm_kpts[k], non0tab, xctype,
IndexError: index 2 is out of bounds for axis 0 with size 2
|
IndexError
|
def get_jk(
self, dm, hermi=1, kpts=None, kpts_band=None, with_j=True, with_k=True, exxdiv=None
):
if kpts is None:
if numpy.all(self.kpts == 0):
# Gamma-point calculation by default
kpts = numpy.zeros(3)
else:
kpts = self.kpts
kpts = numpy.asarray(kpts)
if kpts.shape == (3,):
return df_jk.get_jk(self, dm, hermi, kpts, kpts_band, with_j, with_k, exxdiv)
vj = vk = None
if with_k:
vk = df_jk.get_k_kpts(self, dm, hermi, kpts, kpts_band, exxdiv)
if with_j:
vj = df_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
return vj, vk
|
def get_jk(
self,
dm,
hermi=1,
kpts=None,
kpts_band=None,
with_j=True,
with_k=True,
exxdiv="ewald",
):
if kpts is None:
if numpy.all(self.kpts == 0):
# Gamma-point calculation by default
kpts = numpy.zeros(3)
else:
kpts = self.kpts
kpts = numpy.asarray(kpts)
if kpts.shape == (3,):
return df_jk.get_jk(self, dm, hermi, kpts, kpts_band, with_j, with_k, exxdiv)
vj = vk = None
if with_k:
vk = df_jk.get_k_kpts(self, dm, hermi, kpts, kpts_band, exxdiv)
if with_j:
vj = df_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
return vj, vk
|
https://github.com/pyscf/pyscf/issues/253
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 1476, in kernel
return self.scf(dm0, **kwargs)
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 1470, in scf
conv_check=self.conv_check, **kwargs)
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 143, in kernel
vhf = mf.get_veff(mol, dm)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/scf/krohf.py", line 335, in get_veff
vj, vk = self.get_jk(cell, dm_kpts, hermi, kpts, kpts_band)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/scf/khf.py", line 493, in get_jk
exxdiv=self.exxdiv)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/df/fft.py", line 288, in get_jk
vj = fft_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/df/fft_jk.py", line 67, in get_j_kpts
rhoR[i,p0:p1] += make_rho(i, ao_ks, mask, 'LDA')
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/dft/numint.py", line 1222, in make_rho
hermi=1)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/dft/numint.py", line 1086, in eval_rho
rhoR += eval_rho(cell, ao_kpts[k], dm_kpts[k], non0tab, xctype,
IndexError: index 2 is out of bounds for axis 0 with size 2
|
IndexError
|
def get_jk(
self, dm, hermi=1, kpts=None, kpts_band=None, with_j=True, with_k=True, exxdiv=None
):
from pyscf.pbc.df import fft_jk
if kpts is None:
if numpy.all(self.kpts == 0):
# Gamma-point calculation by default
kpts = numpy.zeros(3)
else:
kpts = self.kpts
else:
kpts = numpy.asarray(kpts)
vj = vk = None
if kpts.shape == (3,):
vj, vk = fft_jk.get_jk(self, dm, hermi, kpts, kpts_band, with_j, with_k, exxdiv)
else:
if with_k:
vk = fft_jk.get_k_kpts(self, dm, hermi, kpts, kpts_band, exxdiv)
if with_j:
vj = fft_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
return vj, vk
|
def get_jk(
self,
dm,
hermi=1,
kpts=None,
kpts_band=None,
with_j=True,
with_k=True,
exxdiv="ewald",
):
from pyscf.pbc.df import fft_jk
if kpts is None:
if numpy.all(self.kpts == 0):
# Gamma-point calculation by default
kpts = numpy.zeros(3)
else:
kpts = self.kpts
else:
kpts = numpy.asarray(kpts)
vj = vk = None
if kpts.shape == (3,):
vj, vk = fft_jk.get_jk(self, dm, hermi, kpts, kpts_band, with_j, with_k, exxdiv)
else:
if with_k:
vk = fft_jk.get_k_kpts(self, dm, hermi, kpts, kpts_band, exxdiv)
if with_j:
vj = fft_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
return vj, vk
|
https://github.com/pyscf/pyscf/issues/253
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 1476, in kernel
return self.scf(dm0, **kwargs)
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 1470, in scf
conv_check=self.conv_check, **kwargs)
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 143, in kernel
vhf = mf.get_veff(mol, dm)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/scf/krohf.py", line 335, in get_veff
vj, vk = self.get_jk(cell, dm_kpts, hermi, kpts, kpts_band)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/scf/khf.py", line 493, in get_jk
exxdiv=self.exxdiv)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/df/fft.py", line 288, in get_jk
vj = fft_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/df/fft_jk.py", line 67, in get_j_kpts
rhoR[i,p0:p1] += make_rho(i, ao_ks, mask, 'LDA')
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/dft/numint.py", line 1222, in make_rho
hermi=1)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/dft/numint.py", line 1086, in eval_rho
rhoR += eval_rho(cell, ao_kpts[k], dm_kpts[k], non0tab, xctype,
IndexError: index 2 is out of bounds for axis 0 with size 2
|
IndexError
|
def get_jk(
self, dm, hermi=1, kpts=None, kpts_band=None, with_j=True, with_k=True, exxdiv=None
):
if kpts is None:
if numpy.all(self.kpts == 0):
# Gamma-point calculation by default
kpts = numpy.zeros(3)
else:
kpts = self.kpts
kpts = numpy.asarray(kpts)
if kpts.shape == (3,):
return mdf_jk.get_jk(self, dm, hermi, kpts, kpts_band, with_j, with_k, exxdiv)
vj = vk = None
if with_k:
vk = mdf_jk.get_k_kpts(self, dm, hermi, kpts, kpts_band, exxdiv)
if with_j:
vj = mdf_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
return vj, vk
|
def get_jk(
self,
dm,
hermi=1,
kpts=None,
kpts_band=None,
with_j=True,
with_k=True,
exxdiv="ewald",
):
if kpts is None:
if numpy.all(self.kpts == 0):
# Gamma-point calculation by default
kpts = numpy.zeros(3)
else:
kpts = self.kpts
kpts = numpy.asarray(kpts)
if kpts.shape == (3,):
return mdf_jk.get_jk(self, dm, hermi, kpts, kpts_band, with_j, with_k, exxdiv)
vj = vk = None
if with_k:
vk = mdf_jk.get_k_kpts(self, dm, hermi, kpts, kpts_band, exxdiv)
if with_j:
vj = mdf_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
return vj, vk
|
https://github.com/pyscf/pyscf/issues/253
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 1476, in kernel
return self.scf(dm0, **kwargs)
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 1470, in scf
conv_check=self.conv_check, **kwargs)
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 143, in kernel
vhf = mf.get_veff(mol, dm)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/scf/krohf.py", line 335, in get_veff
vj, vk = self.get_jk(cell, dm_kpts, hermi, kpts, kpts_band)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/scf/khf.py", line 493, in get_jk
exxdiv=self.exxdiv)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/df/fft.py", line 288, in get_jk
vj = fft_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/df/fft_jk.py", line 67, in get_j_kpts
rhoR[i,p0:p1] += make_rho(i, ao_ks, mask, 'LDA')
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/dft/numint.py", line 1222, in make_rho
hermi=1)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/dft/numint.py", line 1086, in eval_rho
rhoR += eval_rho(cell, ao_kpts[k], dm_kpts[k], non0tab, xctype,
IndexError: index 2 is out of bounds for axis 0 with size 2
|
IndexError
|
def get_init_guess(self, cell=None, key="minao"):
if cell is None:
cell = self.cell
dm_kpts = None
key = key.lower()
if key == "1e" or key == "hcore":
dm_kpts = self.init_guess_by_1e(cell)
elif getattr(cell, "natm", 0) == 0:
logger.info(self, "No atom found in cell. Use 1e initial guess")
dm_kpts = self.init_guess_by_1e(cell)
elif key == "atom":
dm = self.init_guess_by_atom(cell)
elif key[:3] == "chk":
try:
dm_kpts = self.from_chk()
except (IOError, KeyError):
logger.warn(self, "Fail to read %s. Use MINAO initial guess", self.chkfile)
dm = self.init_guess_by_minao(cell)
else:
dm = self.init_guess_by_minao(cell)
if dm_kpts is None:
dm_kpts = lib.asarray([dm] * len(self.kpts))
if cell.dimension < 3:
ne = np.einsum("kij,kji->k", dm_kpts, self.get_ovlp(cell)).real
nelec = cell.nelectron
if np.any(abs(ne - nelec) > 1e-7):
logger.warn(
self,
"Big error detected in the electron number "
"of initial guess density matrix (Ne/cell = %g)!\n"
" This can cause huge error in Fock matrix and "
"lead to instability in SCF for low-dimensional "
"systems.\n DM is normalized to correct number "
"of electrons",
ne.mean(),
)
dm_kpts *= (nelec / ne).reshape(-1, 1, 1)
return dm_kpts
|
def get_init_guess(self, cell=None, key="minao"):
if cell is None:
cell = self.cell
dm_kpts = None
key = key.lower()
if key == "1e" or key == "hcore":
dm_kpts = self.init_guess_by_1e(cell)
elif getattr(cell, "natm", 0) == 0:
logger.info(self, "No atom found in cell. Use 1e initial guess")
dm_kpts = self.init_guess_by_1e(cell)
elif key == "atom":
dm = self.init_guess_by_atom(cell)
elif key[:3] == "chk":
try:
dm_kpts = self.from_chk()
except (IOError, KeyError):
logger.warn(self, "Fail to read %s. Use MINAO initial guess", self.chkfile)
dm = self.init_guess_by_minao(cell)
else:
dm = self.init_guess_by_minao(cell)
if dm_kpts is None:
dm_kpts = lib.asarray([dm] * len(self.kpts))
if cell.dimension < 3:
ne = np.einsum("kij,kji->k", dm_kpts, self.get_ovlp(cell)).real
if np.any(abs(ne - cell.nelectron) > 1e-7):
logger.warn(
self,
"Big error detected in the electron number "
"of initial guess density matrix (Ne/cell = %g)!\n"
" This can cause huge error in Fock matrix and "
"lead to instability in SCF for low-dimensional "
"systems.\n DM is normalized to correct number "
"of electrons",
ne.mean(),
)
dm_kpts *= cell.nelectron / ne.reshape(-1, 1, 1)
return dm_kpts
|
https://github.com/pyscf/pyscf/issues/253
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 1476, in kernel
return self.scf(dm0, **kwargs)
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 1470, in scf
conv_check=self.conv_check, **kwargs)
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 143, in kernel
vhf = mf.get_veff(mol, dm)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/scf/krohf.py", line 335, in get_veff
vj, vk = self.get_jk(cell, dm_kpts, hermi, kpts, kpts_band)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/scf/khf.py", line 493, in get_jk
exxdiv=self.exxdiv)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/df/fft.py", line 288, in get_jk
vj = fft_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/df/fft_jk.py", line 67, in get_j_kpts
rhoR[i,p0:p1] += make_rho(i, ao_ks, mask, 'LDA')
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/dft/numint.py", line 1222, in make_rho
hermi=1)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/dft/numint.py", line 1086, in eval_rho
rhoR += eval_rho(cell, ao_kpts[k], dm_kpts[k], non0tab, xctype,
IndexError: index 2 is out of bounds for axis 0 with size 2
|
IndexError
|
def get_init_guess(self, cell=None, key="minao"):
if cell is None:
cell = self.cell
dm_kpts = None
key = key.lower()
if key == "1e" or key == "hcore":
dm_kpts = self.init_guess_by_1e(cell)
elif getattr(cell, "natm", 0) == 0:
logger.info(self, "No atom found in cell. Use 1e initial guess")
dm_kpts = self.init_guess_by_1e(cell)
elif key == "atom":
dm = self.init_guess_by_atom(cell)
elif key[:3] == "chk":
try:
dm_kpts = self.from_chk()
except (IOError, KeyError):
logger.warn(self, "Fail to read %s. Use MINAO initial guess", self.chkfile)
dm = self.init_guess_by_minao(cell)
else:
dm = self.init_guess_by_minao(cell)
if dm_kpts is None:
nkpts = len(self.kpts)
# dm[spin,nao,nao] at gamma point -> dm_kpts[spin,nkpts,nao,nao]
dm_kpts = np.repeat(dm[:, None, :, :], nkpts, axis=1)
if cell.dimension < 3:
ne = np.einsum("xkij,kji->xk", dm_kpts, self.get_ovlp(cell)).real
nelec = np.asarray(cell.nelec).reshape(2, 1)
if np.any(abs(ne - nelec) > 1e-7):
logger.warn(
self,
"Big error detected in the electron number "
"of initial guess density matrix (Ne/cell = %g)!\n"
" This can cause huge error in Fock matrix and "
"lead to instability in SCF for low-dimensional "
"systems.\n DM is normalized to correct number "
"of electrons",
ne.mean(),
)
dm_kpts *= (nelec / ne).reshape(2, -1, 1, 1)
return dm_kpts
|
def get_init_guess(self, cell=None, key="minao"):
if cell is None:
cell = self.cell
dm_kpts = None
if key.lower() == "1e":
dm_kpts = self.init_guess_by_1e(cell)
elif getattr(cell, "natm", 0) == 0:
logger.info(self, "No atom found in cell. Use 1e initial guess")
dm_kpts = self.init_guess_by_1e(cell)
elif key.lower() == "atom":
dm = self.init_guess_by_atom(cell)
elif key.lower().startswith("chk"):
try:
dm_kpts = self.from_chk()
except (IOError, KeyError):
logger.warn(self, "Fail to read %s. Use MINAO initial guess", self.chkfile)
dm = self.init_guess_by_minao(cell)
else:
dm = self.init_guess_by_minao(cell)
if dm_kpts is None:
dm_kpts = lib.asarray([dm] * len(self.kpts))
if cell.dimension < 3:
ne = np.einsum("xkij,kji->xk", dm_kpts, self.get_ovlp(cell))
nelec = np.asarray(cell.nelec).reshape(2, 1)
if np.any(abs(ne - nelec) > 1e-7):
logger.warn(
self,
"Big error detected in the electron number "
"of initial guess density matrix (Ne/cell = %g)!\n"
" This can cause huge error in Fock matrix and "
"lead to instability in SCF for low-dimensional "
"systems.\n DM is normalized to correct number "
"of electrons",
ne.mean(),
)
dm_kpts *= (nelec / ne).reshape(2, -1, 1, 1)
return dm_kpts
|
https://github.com/pyscf/pyscf/issues/253
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 1476, in kernel
return self.scf(dm0, **kwargs)
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 1470, in scf
conv_check=self.conv_check, **kwargs)
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 143, in kernel
vhf = mf.get_veff(mol, dm)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/scf/krohf.py", line 335, in get_veff
vj, vk = self.get_jk(cell, dm_kpts, hermi, kpts, kpts_band)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/scf/khf.py", line 493, in get_jk
exxdiv=self.exxdiv)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/df/fft.py", line 288, in get_jk
vj = fft_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/df/fft_jk.py", line 67, in get_j_kpts
rhoR[i,p0:p1] += make_rho(i, ao_ks, mask, 'LDA')
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/dft/numint.py", line 1222, in make_rho
hermi=1)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/dft/numint.py", line 1086, in eval_rho
rhoR += eval_rho(cell, ao_kpts[k], dm_kpts[k], non0tab, xctype,
IndexError: index 2 is out of bounds for axis 0 with size 2
|
IndexError
|
def get_init_guess(self, cell=None, key="minao"):
if cell is None:
cell = self.cell
dm_kpts = None
key = key.lower()
if key == "1e" or key == "hcore":
dm_kpts = self.init_guess_by_1e(cell)
elif getattr(cell, "natm", 0) == 0:
logger.info(self, "No atom found in cell. Use 1e initial guess")
dm_kpts = self.init_guess_by_1e(cell)
elif key == "atom":
dm = self.init_guess_by_atom(cell)
elif key[:3] == "chk":
try:
dm_kpts = self.from_chk()
except (IOError, KeyError):
logger.warn(self, "Fail to read %s. Use MINAO initial guess", self.chkfile)
dm = self.init_guess_by_minao(cell)
else:
dm = self.init_guess_by_minao(cell)
if dm_kpts is None:
nao = dm[0].shape[-1]
nkpts = len(self.kpts)
# dm[spin,nao,nao] at gamma point -> dm_kpts[spin,nkpts,nao,nao]
dm_kpts = np.repeat(dm[:, None, :, :], nkpts, axis=1)
dm_kpts[0, :] *= 1.01
dm_kpts[1, :] *= 0.99 # To slightly break spin symmetry
assert dm_kpts.shape[0] == 2
if cell.dimension < 3:
ne = np.einsum("xkij,kji->xk", dm_kpts, self.get_ovlp(cell)).real
nelec = np.asarray(cell.nelec).reshape(2, 1)
if np.any(abs(ne - nelec) > 1e-7):
logger.warn(
self,
"Big error detected in the electron number "
"of initial guess density matrix (Ne/cell = %g)!\n"
" This can cause huge error in Fock matrix and "
"lead to instability in SCF for low-dimensional "
"systems.\n DM is normalized to correct number "
"of electrons",
ne.mean(),
)
dm_kpts *= (nelec / ne).reshape(2, -1, 1, 1)
return dm_kpts
|
def get_init_guess(self, cell=None, key="minao"):
if cell is None:
cell = self.cell
dm_kpts = None
if key.lower() == "1e":
dm_kpts = self.init_guess_by_1e(cell)
elif getattr(cell, "natm", 0) == 0:
logger.info(self, "No atom found in cell. Use 1e initial guess")
dm_kpts = self.init_guess_by_1e(cell)
elif key.lower() == "atom":
dm = self.init_guess_by_atom(cell)
elif key.lower().startswith("chk"):
try:
dm_kpts = self.from_chk()
except (IOError, KeyError):
logger.warn(self, "Fail to read %s. Use MINAO initial guess", self.chkfile)
dm = self.init_guess_by_minao(cell)
else:
dm = self.init_guess_by_minao(cell)
if dm_kpts is None:
nao = dm[0].shape[-1]
nkpts = len(self.kpts)
dm_kpts = lib.asarray([dm] * nkpts).reshape(nkpts, 2, nao, nao)
dm_kpts = dm_kpts.transpose(1, 0, 2, 3)
dm_kpts[0, :] *= 1.01
dm_kpts[1, :] *= 0.99 # To break spin symmetry
assert dm_kpts.shape[0] == 2
if cell.dimension < 3:
ne = np.einsum("xkij,kji->xk", dm_kpts, self.get_ovlp(cell)).real
nelec = np.asarray(cell.nelec).reshape(2, 1)
if np.any(abs(ne - nelec) > 1e-7):
logger.warn(
self,
"Big error detected in the electron number "
"of initial guess density matrix (Ne/cell = %g)!\n"
" This can cause huge error in Fock matrix and "
"lead to instability in SCF for low-dimensional "
"systems.\n DM is normalized to correct number "
"of electrons",
ne.mean(),
)
dm_kpts *= (nelec / ne).reshape(2, -1, 1, 1)
return dm_kpts
|
https://github.com/pyscf/pyscf/issues/253
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 1476, in kernel
return self.scf(dm0, **kwargs)
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 1470, in scf
conv_check=self.conv_check, **kwargs)
File "/home/gagliard/phamx494/pyscf/pyscf/scf/hf.py", line 143, in kernel
vhf = mf.get_veff(mol, dm)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/scf/krohf.py", line 335, in get_veff
vj, vk = self.get_jk(cell, dm_kpts, hermi, kpts, kpts_band)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/scf/khf.py", line 493, in get_jk
exxdiv=self.exxdiv)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/df/fft.py", line 288, in get_jk
vj = fft_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/df/fft_jk.py", line 67, in get_j_kpts
rhoR[i,p0:p1] += make_rho(i, ao_ks, mask, 'LDA')
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/dft/numint.py", line 1222, in make_rho
hermi=1)
File "/home/gagliard/phamx494/pyscf/pyscf/pbc/dft/numint.py", line 1086, in eval_rho
rhoR += eval_rho(cell, ao_kpts[k], dm_kpts[k], non0tab, xctype,
IndexError: index 2 is out of bounds for axis 0 with size 2
|
IndexError
|
def export_items():
result = {
"timestamp": datetime.utcnow().isoformat(),
"scope": [
{
"target": item.target,
"blacklist": item.blacklist,
"tags": item.get_tag_names(),
}
for item in ScopeItem.getScope()
],
"blacklist": [
{
"target": item.target,
"blacklist": item.blacklist,
"tags": item.get_tag_names(),
}
for item in ScopeItem.getBlacklist()
],
}
print(json.dumps(result, indent=2))
|
def export_items():
result = {
"timestamp": datetime.utcnow().isoformat(),
"scope": [
{"target": item.target, "blacklist": item.blacklist, "tags": item.tags}
for item in ScopeItem.getScope()
],
"blacklist": [
{"target": item.target, "blacklist": item.blacklist, "tags": item.tags}
for item in ScopeItem.getBlacklist()
],
}
print(json.dumps(result, indent=2))
|
https://github.com/natlas/natlas/issues/426
|
root@natlas:~# docker exec -ti natlas_server flask scope export
Sentry.io enabled and reporting errors to https://sentry.technowizardry.net
Traceback (most recent call last):
File "/.venv/bin/flask", line 8, in <module>
sys.exit(main())
File "/.venv/lib/python3.8/site-packages/flask/cli.py", line 967, in main
cli.main(args=sys.argv[1:], prog_name="python -m flask" if as_module else None)
File "/.venv/lib/python3.8/site-packages/flask/cli.py", line 586, in main
return super(FlaskGroup, self).main(*args, **kwargs)
File "/.venv/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/.venv/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/.venv/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/.venv/lib/python3.8/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/.venv/lib/python3.8/site-packages/flask/cli.py", line 426, in decorator
return __ctx.invoke(f, *args, **kwargs)
File "/.venv/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/opt/natlas/natlas-server/app/cli/scope.py", line 79, in export_items
print(json.dumps(result, indent=2))
File "/usr/local/lib/python3.8/json/__init__.py", line 234, in dumps
return cls(
File "/usr/local/lib/python3.8/json/encoder.py", line 201, in encode
chunks = list(chunks)
File "/usr/local/lib/python3.8/json/encoder.py", line 431, in _iterencode
yield from _iterencode_dict(o, _current_indent_level)
File "/usr/local/lib/python3.8/json/encoder.py", line 405, in _iterencode_dict
yield from chunks
File "/usr/local/lib/python3.8/json/encoder.py", line 325, in _iterencode_list
yield from chunks
File "/usr/local/lib/python3.8/json/encoder.py", line 405, in _iterencode_dict
yield from chunks
File "/usr/local/lib/python3.8/json/encoder.py", line 325, in _iterencode_list
yield from chunks
File "/usr/local/lib/python3.8/json/encoder.py", line 438, in _iterencode
o = _default(o)
File "/usr/local/lib/python3.8/json/encoder.py", line 179, in default
raise TypeError(f'Object of type {o.__class__.__name__} '
TypeError: Object of type Tag is not JSON serializable
|
TypeError
|
def get_web_screenshots(target, scan_id, proctimeout):
scan_dir = utils.get_scan_dir(scan_id)
xml_file = os.path.join(scan_dir, f"nmap.{scan_id}.xml")
output_dir = os.path.join(scan_dir, f"aquatone.{scan_id}")
logger.info(f"Attempting to take screenshots for {target}")
aquatoneArgs = ["aquatone", "-nmap", "-scan-timeout", "2500", "-out", output_dir]
with open(xml_file, "r") as f:
process = subprocess.Popen(aquatoneArgs, stdin=f, stdout=subprocess.DEVNULL) # nosec
try:
process.communicate(timeout=proctimeout)
if process.returncode == 0:
time.sleep(
0.5
) # a small sleep to make sure all file handles are closed so that the agent can read them
except subprocess.TimeoutExpired:
logger.warning(f"TIMEOUT: Killing aquatone against {target}")
process.kill()
return parse_aquatone_session(output_dir)
|
def get_web_screenshots(target, scan_id, proctimeout):
scan_dir = utils.get_scan_dir(scan_id)
xml_file = os.path.join(scan_dir, f"nmap.{scan_id}.xml")
outFiles = os.path.join(scan_dir, f"aquatone.{scan_id}")
output = []
logger.info(f"Attempting to take screenshots for {target}")
aquatoneArgs = ["aquatone", "-nmap", "-scan-timeout", "2500", "-out", outFiles]
with open(xml_file, "r") as f:
process = subprocess.Popen(aquatoneArgs, stdin=f, stdout=subprocess.DEVNULL) # nosec
try:
process.communicate(timeout=proctimeout)
if process.returncode == 0:
time.sleep(
0.5
) # a small sleep to make sure all file handles are closed so that the agent can read them
except subprocess.TimeoutExpired:
logger.warning(f"TIMEOUT: Killing aquatone against {target}")
process.kill()
session_path = os.path.join(outFiles, "aquatone_session.json")
if not os.path.isfile(session_path):
return output
with open(session_path) as f:
session = json.load(f)
if session["stats"]["screenshotSuccessful"] > 0:
logger.info(
f"{target} - Success: {session['stats']['screenshotSuccessful']}, Fail: {session['stats']['screenshotFailed']}"
)
for k, page in session["pages"].items():
fqScreenshotPath = os.path.join(outFiles, page["screenshotPath"])
if page["hasScreenshot"] and os.path.isfile(fqScreenshotPath):
urlp = urlparse(page["url"])
if not urlp.port and urlp.scheme == "http":
port = 80
elif not urlp.port and urlp.scheme == "https":
port = 443
else:
port = urlp.port
logger.info(
f"{urlp.scheme.upper()} screenshot acquired for {page['hostname']} on port {port}"
)
output.append(
{
"host": page["hostname"],
"port": port,
"service": urlp.scheme.upper(),
"data": base64_image(fqScreenshotPath),
}
)
return output
|
https://github.com/natlas/natlas/issues/412
|
[2020-07-25 00:22:59,784] ERROR in app: Exception on /api/submit [POST]
Traceback (most recent call last):
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1952, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1821, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/.venv/lib/python3.8/site-packages/flask/_compat.py", line 39, in reraise
raise value
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1950, in full_dispatch_request
rv = self.dispatch_request()
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1936, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 49, in decorated_function
return f(*args, **kwargs)
File "/opt/natlas/natlas-server/app/api/routes.py", line 187, in submit
newhost["screenshots"], newhost["num_screenshots"] = process_screenshots(
File "/opt/natlas/natlas-server/app/api/processing/screenshot.py", line 52, in process_screenshots
item["thumb_hash"] = create_thumbnail(fname, file_ext)
File "/opt/natlas/natlas-server/app/api/processing/screenshot.py", line 10, in create_thumbnail
thumb = Image.open(fname)
File "/.venv/lib/python3.8/site-packages/PIL/Image.py", line 2895, in open
raise UnidentifiedImageError(
PIL.UnidentifiedImageError: cannot identify image file '/data/media/original/e3/b0/e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855.png'
|
PIL.UnidentifiedImageError
|
def get_vnc_screenshots(target, scan_id, proctimeout):
scan_dir = utils.get_scan_dir(scan_id)
output_file = os.path.join(scan_dir, f"vncsnapshot.{scan_id}.jpg")
logger.info(f"Attempting to take VNC screenshot for {target}")
vncsnapshot_args = [
"xvfb-run",
"vncsnapshot",
"-quality",
"50",
target,
output_file,
]
process = subprocess.Popen(
vncsnapshot_args, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
) # nosec
try:
process.communicate(timeout=proctimeout)
except subprocess.TimeoutExpired:
logger.warning(f"TIMEOUT: Killing vncsnapshot against {target}")
process.kill()
if not is_valid_image(output_file):
return {}
logger.info(f"VNC screenshot acquired for {target} on port 5900")
return {
"host": target,
"port": 5900,
"service": "VNC",
"data": base64_file(output_file),
}
|
def get_vnc_screenshots(target, scan_id, proctimeout):
scan_dir = utils.get_scan_dir(scan_id)
outFile = os.path.join(scan_dir, f"vncsnapshot.{scan_id}.jpg")
logger.info(f"Attempting to take VNC screenshot for {target}")
process = subprocess.Popen(
["xvfb-run", "vncsnapshot", "-quality", "50", target, outFile],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
) # nosec
try:
process.communicate(timeout=proctimeout)
if process.returncode == 0:
return True
except Exception:
try:
logger.warning(f"TIMEOUT: Killing vncsnapshot against {target}")
process.kill()
return False
except Exception:
pass
return False
|
https://github.com/natlas/natlas/issues/412
|
[2020-07-25 00:22:59,784] ERROR in app: Exception on /api/submit [POST]
Traceback (most recent call last):
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1952, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1821, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/.venv/lib/python3.8/site-packages/flask/_compat.py", line 39, in reraise
raise value
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1950, in full_dispatch_request
rv = self.dispatch_request()
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1936, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 49, in decorated_function
return f(*args, **kwargs)
File "/opt/natlas/natlas-server/app/api/routes.py", line 187, in submit
newhost["screenshots"], newhost["num_screenshots"] = process_screenshots(
File "/opt/natlas/natlas-server/app/api/processing/screenshot.py", line 52, in process_screenshots
item["thumb_hash"] = create_thumbnail(fname, file_ext)
File "/opt/natlas/natlas-server/app/api/processing/screenshot.py", line 10, in create_thumbnail
thumb = Image.open(fname)
File "/.venv/lib/python3.8/site-packages/PIL/Image.py", line 2895, in open
raise UnidentifiedImageError(
PIL.UnidentifiedImageError: cannot identify image file '/data/media/original/e3/b0/e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855.png'
|
PIL.UnidentifiedImageError
|
def scan(target_data, config):
if not utils.validate_target(target_data["target"], config):
return False
target = target_data["target"]
scan_id = target_data["scan_id"]
agentConfig = target_data["agent_config"]
command = command_builder(scan_id, agentConfig, target)
scan_dir = utils.get_scan_dir(scan_id)
result = ScanResult(target_data, config)
try:
subprocess.run(
command,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
timeout=int(agentConfig["scanTimeout"]),
) # nosec
except subprocess.TimeoutExpired:
result.add_item("timed_out", True)
logger.warning(f"TIMEOUT: Nmap against {target} ({scan_id})")
return result
logger.info(f"Nmap {target} ({scan_id}) complete")
for ext in "nmap", "gnmap", "xml":
path = os.path.join(scan_dir, f"nmap.{scan_id}.{ext}")
try:
with open(path, "r") as f:
result.add_item(ext + "_data", f.read())
except Exception:
logger.warning(f"Couldn't read {path}")
return False
try:
nmap_report = NmapParser.parse(result.result["xml_data"])
except NmapParserException:
logger.warning(f"Couldn't parse nmap.{scan_id}.xml")
return False
if nmap_report.hosts_total < 1:
logger.warning(f"No hosts found in nmap.{scan_id}.xml")
return False
elif nmap_report.hosts_total > 1:
logger.warning(f"Too many hosts found in nmap.{scan_id}.xml")
return False
elif nmap_report.hosts_down == 1:
# host is down
result.is_up(False)
return result
elif nmap_report.hosts_up == 1 and len(nmap_report.hosts) == 0:
# host is up but no reportable ports were found
result.is_up(True)
result.add_item("port_count", 0)
return result
else:
# host is up and reportable ports were found
result.is_up(nmap_report.hosts[0].is_up())
result.add_item("port_count", len(nmap_report.hosts[0].get_ports()))
if agentConfig["webScreenshots"]:
screens = screenshots.get_web_screenshots(
target, scan_id, agentConfig["webScreenshotTimeout"]
)
for item in screens:
result.add_screenshot(item)
if agentConfig["vncScreenshots"] and "5900/tcp" in result.result["nmap_data"]:
vnc_screenshot = screenshots.get_vnc_screenshots(
target, scan_id, agentConfig["vncScreenshotTimeout"]
)
if vnc_screenshot:
result.add_screenshot(vnc_screenshot)
# submit result
return result
|
def scan(target_data, config):
if not utils.validate_target(target_data["target"], config):
return False
target = target_data["target"]
scan_id = target_data["scan_id"]
agentConfig = target_data["agent_config"]
command = command_builder(scan_id, agentConfig, target)
scan_dir = utils.get_scan_dir(scan_id)
result = ScanResult(target_data, config)
try:
subprocess.run(
command,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
timeout=int(agentConfig["scanTimeout"]),
) # nosec
except subprocess.TimeoutExpired:
result.add_item("timed_out", True)
logger.warning(f"TIMEOUT: Nmap against {target} ({scan_id})")
return result
logger.info(f"Nmap {target} ({scan_id}) complete")
for ext in "nmap", "gnmap", "xml":
path = os.path.join(scan_dir, f"nmap.{scan_id}.{ext}")
try:
with open(path, "r") as f:
result.add_item(ext + "_data", f.read())
except Exception:
logger.warning(f"Couldn't read {path}")
return False
try:
nmap_report = NmapParser.parse(result.result["xml_data"])
except NmapParserException:
logger.warning(f"Couldn't parse nmap.{scan_id}.xml")
return False
if nmap_report.hosts_total < 1:
logger.warning(f"No hosts found in nmap.{scan_id}.xml")
return False
elif nmap_report.hosts_total > 1:
logger.warning(f"Too many hosts found in nmap.{scan_id}.xml")
return False
elif nmap_report.hosts_down == 1:
# host is down
result.is_up(False)
return result
elif nmap_report.hosts_up == 1 and len(nmap_report.hosts) == 0:
# host is up but no reportable ports were found
result.is_up(True)
result.add_item("port_count", 0)
return result
else:
# host is up and reportable ports were found
result.is_up(nmap_report.hosts[0].is_up())
result.add_item("port_count", len(nmap_report.hosts[0].get_ports()))
if agentConfig["webScreenshots"] and shutil.which("aquatone") is not None:
screens = screenshots.get_web_screenshots(
target, scan_id, agentConfig["webScreenshotTimeout"]
)
for item in screens:
result.add_screenshot(item)
if (
agentConfig["vncScreenshots"]
and "5900/tcp" in result.result["nmap_data"]
and screenshots.get_vnc_screenshots(
target, scan_id, agentConfig["vncScreenshotTimeout"]
)
):
screenshotPath = os.path.join(scan_dir, f"vncsnapshot.{scan_id}.jpg")
if os.path.isfile(screenshotPath):
result.add_screenshot(
{
"host": target,
"port": 5900,
"service": "VNC",
"data": screenshots.base64_image(screenshotPath),
}
)
logger.info(f"VNC screenshot acquired for {result.result['ip']}")
# submit result
return result
|
https://github.com/natlas/natlas/issues/412
|
[2020-07-25 00:22:59,784] ERROR in app: Exception on /api/submit [POST]
Traceback (most recent call last):
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1952, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1821, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/.venv/lib/python3.8/site-packages/flask/_compat.py", line 39, in reraise
raise value
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1950, in full_dispatch_request
rv = self.dispatch_request()
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1936, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 49, in decorated_function
return f(*args, **kwargs)
File "/opt/natlas/natlas-server/app/api/routes.py", line 187, in submit
newhost["screenshots"], newhost["num_screenshots"] = process_screenshots(
File "/opt/natlas/natlas-server/app/api/processing/screenshot.py", line 52, in process_screenshots
item["thumb_hash"] = create_thumbnail(fname, file_ext)
File "/opt/natlas/natlas-server/app/api/processing/screenshot.py", line 10, in create_thumbnail
thumb = Image.open(fname)
File "/.venv/lib/python3.8/site-packages/PIL/Image.py", line 2895, in open
raise UnidentifiedImageError(
PIL.UnidentifiedImageError: cannot identify image file '/data/media/original/e3/b0/e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855.png'
|
PIL.UnidentifiedImageError
|
def create_thumbnail(fname, file_ext):
thumb_size = (255, 160)
thumb = Image.open(fname)
thumb.thumbnail(thumb_size)
thumb_hash = hashlib.sha256(thumb.tobytes()).hexdigest()
fname = get_file_path(thumb_hash, "thumbs", file_ext)
thumb.save(fname)
thumb.close()
return thumb_hash
|
def create_thumbnail(fname, file_ext):
thumb_size = (255, 160)
thumb = Image.open(fname)
thumb.thumbnail(thumb_size)
thumb_hash = hashlib.sha256(thumb.tobytes()).hexdigest()
thumbhashpath = f"{thumb_hash[0:2]}/{thumb_hash[2:4]}"
thumbpath = os.path.join(
current_app.config["MEDIA_DIRECTORY"], "thumbs", thumbhashpath
)
# makedirs attempts to make every directory necessary to get to the "thumbs" folder
os.makedirs(thumbpath, exist_ok=True)
fname = os.path.join(thumbpath, thumb_hash + file_ext)
thumb.save(fname)
thumb.close()
return thumb_hash
|
https://github.com/natlas/natlas/issues/412
|
[2020-07-25 00:22:59,784] ERROR in app: Exception on /api/submit [POST]
Traceback (most recent call last):
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1952, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1821, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/.venv/lib/python3.8/site-packages/flask/_compat.py", line 39, in reraise
raise value
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1950, in full_dispatch_request
rv = self.dispatch_request()
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1936, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 49, in decorated_function
return f(*args, **kwargs)
File "/opt/natlas/natlas-server/app/api/routes.py", line 187, in submit
newhost["screenshots"], newhost["num_screenshots"] = process_screenshots(
File "/opt/natlas/natlas-server/app/api/processing/screenshot.py", line 52, in process_screenshots
item["thumb_hash"] = create_thumbnail(fname, file_ext)
File "/opt/natlas/natlas-server/app/api/processing/screenshot.py", line 10, in create_thumbnail
thumb = Image.open(fname)
File "/.venv/lib/python3.8/site-packages/PIL/Image.py", line 2895, in open
raise UnidentifiedImageError(
PIL.UnidentifiedImageError: cannot identify image file '/data/media/original/e3/b0/e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855.png'
|
PIL.UnidentifiedImageError
|
def process_screenshots(screenshots: list) -> tuple:
processed_screenshots = []
for item in screenshots:
screenshot = process_screenshot(item)
if not screenshot:
current_app.logger.warning(
f"Received invalid image for {screenshot['hostname']} port {screenshot['port']}"
)
continue
processed_screenshots.append(item)
return processed_screenshots, len(processed_screenshots)
|
def process_screenshots(screenshots):
# Handle screenshots
num_screenshots = 0
for item in screenshots:
if item["service"] == "VNC":
file_ext = ".jpg"
else: # Handles http, https files from aquatone/chromium-headless
file_ext = ".png"
image = base64.b64decode(item["data"])
image_hash = hashlib.sha256(image).hexdigest()
hashpath = f"{image_hash[0:2]}/{image_hash[2:4]}"
dirpath = os.path.join(
current_app.config["MEDIA_DIRECTORY"], "original", hashpath
)
# makedirs attempts to make every directory necessary to get to the "original" folder
os.makedirs(dirpath, exist_ok=True)
fname = os.path.join(dirpath, image_hash + file_ext)
with open(fname, "wb") as f:
f.write(image)
item["hash"] = image_hash
del item["data"]
item["thumb_hash"] = create_thumbnail(fname, file_ext)
num_screenshots += 1
return screenshots, num_screenshots
|
https://github.com/natlas/natlas/issues/412
|
[2020-07-25 00:22:59,784] ERROR in app: Exception on /api/submit [POST]
Traceback (most recent call last):
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1952, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1821, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/.venv/lib/python3.8/site-packages/flask/_compat.py", line 39, in reraise
raise value
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1950, in full_dispatch_request
rv = self.dispatch_request()
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1936, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 49, in decorated_function
return f(*args, **kwargs)
File "/opt/natlas/natlas-server/app/api/routes.py", line 187, in submit
newhost["screenshots"], newhost["num_screenshots"] = process_screenshots(
File "/opt/natlas/natlas-server/app/api/processing/screenshot.py", line 52, in process_screenshots
item["thumb_hash"] = create_thumbnail(fname, file_ext)
File "/opt/natlas/natlas-server/app/api/processing/screenshot.py", line 10, in create_thumbnail
thumb = Image.open(fname)
File "/.venv/lib/python3.8/site-packages/PIL/Image.py", line 2895, in open
raise UnidentifiedImageError(
PIL.UnidentifiedImageError: cannot identify image file '/data/media/original/e3/b0/e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855.png'
|
PIL.UnidentifiedImageError
|
def _initialize_indices(self):
"""Check each required index and make sure it exists, if it doesn't then create it"""
for index in self.natlasIndices:
if not self.es.indices.exists(index):
self.es.indices.create(index)
# Avoid a race condition
time.sleep(2)
for index in self.natlasIndices:
if self.esversion.match("<7.0.0"):
self.es.indices.put_mapping(
index=index,
doc_type="_doc",
body=self.mapping,
include_type_name=True,
)
else:
self.es.indices.put_mapping(index=index, body=self.mapping)
|
def _initialize_indices(self):
"""Check each required index and make sure it exists, if it doesn't then create it"""
for index in self.natlasIndices:
if not self.es.indices.exists(index):
self.es.indices.create(index)
# Avoid a race condition
time.sleep(2)
for index in self.natlasIndices:
if self.esversion.match(">=7.0.0"):
self.es.indices.put_mapping(
index=index,
doc_type="_doc",
body=self.mapping,
include_type_name=True,
)
else:
self.es.indices.put_mapping(index=index, doc_type="_doc", body=self.mapping)
|
https://github.com/natlas/natlas/issues/311
|
172.18.0.1 - - [26/Jun/2020 08:50:28] "POST /admin/scans/delete/60ab2a6a40250d8c6c13e9f6d1a39e84 HTTP/1.1" 500 -
Traceback (most recent call last):
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 2464, in __call__
return self.wsgi_app(environ, start_response)
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 2450, in wsgi_app
response = self.handle_exception(e)
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1867, in handle_exception
reraise(exc_type, exc_value, tb)
File "/.venv/lib/python3.8/site-packages/flask/_compat.py", line 39, in reraise
raise value
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1952, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1821, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/.venv/lib/python3.8/site-packages/flask/_compat.py", line 39, in reraise
raise value
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1950, in full_dispatch_request
rv = self.dispatch_request()
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1936, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/.venv/lib/python3.8/site-packages/flask_login/utils.py", line 272, in decorated_view
return func(*args, **kwargs)
File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 38, in decorated_function
return f(*args, **kwargs)
File "/opt/natlas/natlas-server/app/admin/routes.py", line 463, in delete_scan
deleted = current_app.elastic.delete_scan(scan_id)
File "/opt/natlas/natlas-server/app/elastic/interface.py", line 153, in delete_scan
result = self.client.execute_delete_by_query(
File "/opt/natlas/natlas-server/app/elastic/client.py", line 132, in execute_delete_by_query
self._attach_shard_span_attrs(span, results)
File "/opt/natlas/natlas-server/app/elastic/client.py", line 165, in _attach_shard_span_attrs
span.add_attribute("es.shards.total", results["_shards"]["total"])
KeyError: '_shards'
|
KeyError
|
def execute_delete_by_query(self, **kwargs):
"""Executes an arbitrary delete_by_query."""
with self._new_trace_span(operation="delete_by", **kwargs):
results = self._execute_raw_query(
self.es.delete_by_query, doc_type="_doc", **kwargs
)
return results
|
def execute_delete_by_query(self, **kwargs):
"""Executes an arbitrary delete_by_query."""
with self._new_trace_span(operation="delete_by", **kwargs) as span:
results = self._execute_raw_query(
self.es.delete_by_query, doc_type="_doc", **kwargs
)
self._attach_shard_span_attrs(span, results)
return results
|
https://github.com/natlas/natlas/issues/311
|
172.18.0.1 - - [26/Jun/2020 08:50:28] "POST /admin/scans/delete/60ab2a6a40250d8c6c13e9f6d1a39e84 HTTP/1.1" 500 -
Traceback (most recent call last):
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 2464, in __call__
return self.wsgi_app(environ, start_response)
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 2450, in wsgi_app
response = self.handle_exception(e)
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1867, in handle_exception
reraise(exc_type, exc_value, tb)
File "/.venv/lib/python3.8/site-packages/flask/_compat.py", line 39, in reraise
raise value
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1952, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1821, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/.venv/lib/python3.8/site-packages/flask/_compat.py", line 39, in reraise
raise value
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1950, in full_dispatch_request
rv = self.dispatch_request()
File "/.venv/lib/python3.8/site-packages/flask/app.py", line 1936, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/.venv/lib/python3.8/site-packages/flask_login/utils.py", line 272, in decorated_view
return func(*args, **kwargs)
File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 38, in decorated_function
return f(*args, **kwargs)
File "/opt/natlas/natlas-server/app/admin/routes.py", line 463, in delete_scan
deleted = current_app.elastic.delete_scan(scan_id)
File "/opt/natlas/natlas-server/app/elastic/interface.py", line 153, in delete_scan
result = self.client.execute_delete_by_query(
File "/opt/natlas/natlas-server/app/elastic/client.py", line 132, in execute_delete_by_query
self._attach_shard_span_attrs(span, results)
File "/opt/natlas/natlas-server/app/elastic/client.py", line 165, in _attach_shard_span_attrs
span.add_attribute("es.shards.total", results["_shards"]["total"])
KeyError: '_shards'
|
KeyError
|
def create_app(config_class=Config, load_config=False):
initialize_sentryio(config_class)
app = Flask(__name__)
app.config.from_object(Config)
app.jinja_env.add_extension("jinja2.ext.do")
db.init_app(app)
migrate.init_app(app, db)
login.init_app(app)
mail.init_app(app)
csrf.init_app(app)
if load_config:
print("Loading Config from database")
with app.app_context():
from app.models import ConfigItem
# This is gross but we need it because otherwise flask db operations won't work to create the ConfigItem table in the first place.
try:
# Look to see if any new config items were added that aren't currently in db
for item in get_defaults():
if not ConfigItem.query.filter_by(name=item[0]).first():
newConfItem = ConfigItem(
name=item[0], type=item[1], value=item[2]
)
db.session.add(newConfItem)
db.session.commit()
conf = ConfigItem.query.all()
if not conf: # We'll hit this if the table exists but there's no data
populate_defaults(verbose=False)
conf = (
ConfigItem.query.all()
) # populate_defaults should populate data that we can query now
if not conf: # if it didn't, then we don't have config items that we need in order to run, so exit.
raise (SystemExit())
for item in conf:
if item.type == "int":
app.config[item.name] = int(item.value)
elif item.type == "bool":
if item.value == "True":
app.config[item.name] = True
else:
app.config[item.name] = False
elif item.type == "string":
app.config[item.name] = item.value
else:
print(
"Unsupported config type %s:%s:%s"
% (item.name, item.type, item.value)
)
except Exception:
print("ConfigItem table doesn't exist yet. Ignore if flask db upgrade.")
from app.models import NatlasServices
try:
current_services = NatlasServices.query.order_by(
NatlasServices.id.desc()
).first()
if current_services:
app.current_services = current_services.as_dict()
else: # Let's populate server defaults
defaultServices = (
open(
os.path.join(
app.config["BASEDIR"], "defaults/natlas-services"
)
)
.read()
.rstrip("\r\n")
)
defaultSha = hashlib.sha256(defaultServices.encode()).hexdigest()
current_services = NatlasServices(
sha256=defaultSha, services=defaultServices
) # default values until we load something
db.session.add(current_services)
db.session.commit()
print(
"NatlasServices populated with defaults from defaults/natlas-services"
)
app.current_services = current_services.as_dict()
except Exception:
print(
"NatlasServices table doesn't exist yet. Ignore if flask db upgrade."
)
# Load the current agent config, otherwise create it.
from app.models import AgentConfig
try:
agentConfig = AgentConfig.query.get(
1
) # the agent config is updated in place so only 1 record
if agentConfig:
app.agentConfig = agentConfig.as_dict()
else:
newAgentConfig = (
AgentConfig()
) # populate an agent config with database defaults
db.session.add(newAgentConfig)
db.session.commit()
print("AgentConfig populated with defaults")
app.agentConfig = newAgentConfig.as_dict()
except Exception:
print(
"AgentConfig table doesn't exist yet. Ignore if flask db upgrade."
)
# Load the current agent config, otherwise create it.
from app.models import AgentScript
try:
agentScripts = AgentScript.query.all()
if not agentScripts:
defaultAgentScript = AgentScript(name="default")
db.session.add(defaultAgentScript)
db.session.commit()
print("AgentScript populated with default")
agentScripts = AgentScript.query.all()
app.agentScripts = agentScripts
app.agentScriptStr = AgentScript.getScriptsString(
scriptList=agentScripts
)
except Exception:
print(
"AgentScript table doesn't exist yet. Ignore if flask db upgrade."
)
# Grungy thing so we can use flask db and flask shell before the config items are initially populated
if "ELASTICSEARCH_URL" in app.config:
app.elastic = ElasticInterface(app.config["ELASTICSEARCH_URL"])
app.ScopeManager = ScopeManager()
from app.errors import bp as errors_bp
app.register_blueprint(errors_bp)
from app.admin import bp as admin_bp
app.register_blueprint(admin_bp, url_prefix="/admin")
from app.api import bp as api_bp
app.register_blueprint(api_bp, url_prefix="/api")
csrf.exempt(api_bp)
from app.auth import bp as auth_bp
app.register_blueprint(auth_bp, url_prefix="/auth")
from app.user import bp as user_bp
app.register_blueprint(user_bp, url_prefix="/user")
from app.host import bp as host_bp
app.register_blueprint(host_bp, url_prefix="/host")
from app.main import bp as main_bp
app.register_blueprint(main_bp)
from app.filters import bp as filters_bp
app.register_blueprint(filters_bp)
return app
|
def create_app(config_class=Config, load_config=False):
initialize_sentryio(config_class)
app = Flask(__name__)
app.config.from_object(Config)
app.jinja_env.add_extension("jinja2.ext.do")
db.init_app(app)
migrate.init_app(app, db)
login.init_app(app)
mail.init_app(app)
csrf.init_app(app)
if load_config:
print("Loading Config from database")
with app.app_context():
from app.models import ConfigItem
# This is gross but we need it because otherwise flask db operations won't work to create the ConfigItem table in the first place.
try:
# Look to see if any new config items were added that aren't currently in db
for item in get_defaults():
if not ConfigItem.query.filter_by(name=item[0]).first():
newConfItem = ConfigItem(
name=item[0], type=item[1], value=item[2]
)
db.session.add(newConfItem)
db.session.commit()
conf = ConfigItem.query.all()
if not conf: # We'll hit this if the table exists but there's no data
populate_defaults(verbose=False)
conf = (
ConfigItem.query.all()
) # populate_defaults should populate data that we can query now
if not conf: # if it didn't, then we don't have config items that we need in order to run, so exit.
raise (SystemExit())
for item in conf:
if item.type == "int":
app.config[item.name] = int(item.value)
elif item.type == "bool":
if item.value == "True":
app.config[item.name] = True
else:
app.config[item.name] = False
elif item.type == "string":
app.config[item.name] = item.value
else:
print(
"Unsupported config type %s:%s:%s"
% (item.name, item.type, item.value)
)
except Exception:
print("ConfigItem table doesn't exist yet. Ignore if flask db upgrade.")
from app.models import NatlasServices
try:
current_services = NatlasServices.query.order_by(
NatlasServices.id.desc()
).first()
if current_services:
app.current_services = current_services.as_dict()
else: # Let's populate server defaults
defaultServices = (
open(
os.path.join(
app.config["BASEDIR"], "defaults/natlas-services"
)
)
.read()
.rstrip("\r\n")
)
defaultSha = hashlib.sha256(defaultServices.encode()).hexdigest()
current_services = NatlasServices(
sha256=defaultSha, services=defaultServices
) # default values until we load something
db.session.add(current_services)
db.session.commit()
print(
"NatlasServices populated with defaults from defaults/natlas-services"
)
app.current_services = current_services.as_dict()
except Exception:
print(
"NatlasServices table doesn't exist yet. Ignore if flask db upgrade."
)
# Load the current agent config, otherwise create it.
from app.models import AgentConfig
try:
agentConfig = AgentConfig.query.get(
1
) # the agent config is updated in place so only 1 record
if agentConfig:
app.agentConfig = agentConfig.as_dict()
else:
newAgentConfig = (
AgentConfig()
) # populate an agent config with database defaults
db.session.add(newAgentConfig)
db.session.commit()
print("AgentConfig populated with defaults")
app.agentConfig = newAgentConfig.as_dict()
except Exception:
print(
"AgentConfig table doesn't exist yet. Ignore if flask db upgrade."
)
# Load the current agent config, otherwise create it.
from app.models import AgentScript
try:
agentScripts = AgentScript.query.all()
if not agentScripts:
defaultAgentScript = AgentScript(name="default")
db.session.add(defaultAgentScript)
db.session.commit()
print("AgentScript populated with default")
agentScripts = AgentScript.query.all()
app.agentScripts = agentScripts
app.agentScriptStr = AgentScript.getScriptsString(
scriptList=agentScripts
)
except Exception:
print(
"AgentScript table doesn't exist yet. Ignore if flask db upgrade."
)
# Grungy thing so we can use flask db and flask shell before the config items are initially populated
if "ELASTICSEARCH_URL" in app.config:
app.elastic = Elastic(app.config["ELASTICSEARCH_URL"])
app.ScopeManager = ScopeManager()
from app.errors import bp as errors_bp
app.register_blueprint(errors_bp)
from app.admin import bp as admin_bp
app.register_blueprint(admin_bp, url_prefix="/admin")
from app.api import bp as api_bp
app.register_blueprint(api_bp, url_prefix="/api")
csrf.exempt(api_bp)
from app.auth import bp as auth_bp
app.register_blueprint(auth_bp, url_prefix="/auth")
from app.user import bp as user_bp
app.register_blueprint(user_bp, url_prefix="/user")
from app.host import bp as host_bp
app.register_blueprint(host_bp, url_prefix="/host")
from app.main import bp as main_bp
app.register_blueprint(main_bp)
from app.filters import bp as filters_bp
app.register_blueprint(filters_bp)
return app
|
https://github.com/natlas/natlas/issues/221
|
Nov 05 17:22:50 natlas-server bash[10026]: [2019-11-05 17:22:50,681] ERROR in app: Exception on /api/getwork [GET]
Nov 05 17:22:50 natlas-server bash[10026]: Traceback (most recent call last):
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
Nov 05 17:22:50 natlas-server bash[10026]: response = self.full_dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1815, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.handle_user_exception(e)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1718, in handle_user_exception
Nov 05 17:22:50 natlas-server bash[10026]: reraise(exc_type, exc_value, tb)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/_compat.py", line 35, in reraise
Nov 05 17:22:50 natlas-server bash[10026]: raise value
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1813, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1799, in dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: return self.view_functions[rule.endpoint](**req.view_args)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 41, in decorated_function
Nov 05 17:22:50 natlas-server bash[10026]: return f(*args, **kwargs)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/api/routes.py", line 70, in getwork
Nov 05 17:22:50 natlas-server bash[10026]: count, context = current_app.elastic.gethost_scan_id(rand)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 238, in gethost_scan_id
Nov 05 17:22:50 natlas-server bash[10026]: if not self.checkStatus():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 60, in checkStatus
Nov 05 17:22:50 natlas-server bash[10026]: if not self.attemptReconnect():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 43, in attemptReconnect
Nov 05 17:22:50 natlas-server bash[10026]: delta = now - self.lastReconnectAttempt
Nov 05 17:22:50 natlas-server bash[10026]: TypeError: unsupported operand type(s) for -: 'datetime.datetime' and 'NoneType
|
TypeError
|
def admin():
configForm = forms.ConfigForm()
configItems = current_app.config
if configForm.validate_on_submit():
for fieldname, fieldvalue in configForm.data.items():
if fieldname.upper() in ["SUBMIT", "CSRF_TOKEN"]:
continue
current_app.config[fieldname.upper()] = fieldvalue
confitem = ConfigItem.query.filter_by(name=fieldname.upper()).first()
confitem.value = str(fieldvalue)
db.session.add(confitem)
db.session.commit()
return render_template(
"admin/index.html", configForm=configForm, configItems=configItems
)
|
def admin():
configForm = forms.ConfigForm()
configItems = current_app.config
if configForm.validate_on_submit():
for fieldname, fieldvalue in configForm.data.items():
if fieldname.upper() in ["SUBMIT", "CSRF_TOKEN"]:
continue
# if we've got a new elasticsearch address, update our current handle to elastic
if (
fieldname.upper() == "ELASTICSEARCH_URL"
and fieldvalue != current_app.config["ELASTICSEARCH_URL"]
):
current_app.elastic = Elastic(fieldvalue)
current_app.config[fieldname.upper()] = fieldvalue
confitem = ConfigItem.query.filter_by(name=fieldname.upper()).first()
confitem.value = str(fieldvalue)
db.session.add(confitem)
db.session.commit()
return render_template(
"admin/index.html", configForm=configForm, configItems=configItems
)
|
https://github.com/natlas/natlas/issues/221
|
Nov 05 17:22:50 natlas-server bash[10026]: [2019-11-05 17:22:50,681] ERROR in app: Exception on /api/getwork [GET]
Nov 05 17:22:50 natlas-server bash[10026]: Traceback (most recent call last):
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
Nov 05 17:22:50 natlas-server bash[10026]: response = self.full_dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1815, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.handle_user_exception(e)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1718, in handle_user_exception
Nov 05 17:22:50 natlas-server bash[10026]: reraise(exc_type, exc_value, tb)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/_compat.py", line 35, in reraise
Nov 05 17:22:50 natlas-server bash[10026]: raise value
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1813, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1799, in dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: return self.view_functions[rule.endpoint](**req.view_args)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 41, in decorated_function
Nov 05 17:22:50 natlas-server bash[10026]: return f(*args, **kwargs)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/api/routes.py", line 70, in getwork
Nov 05 17:22:50 natlas-server bash[10026]: count, context = current_app.elastic.gethost_scan_id(rand)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 238, in gethost_scan_id
Nov 05 17:22:50 natlas-server bash[10026]: if not self.checkStatus():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 60, in checkStatus
Nov 05 17:22:50 natlas-server bash[10026]: if not self.attemptReconnect():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 43, in attemptReconnect
Nov 05 17:22:50 natlas-server bash[10026]: delta = now - self.lastReconnectAttempt
Nov 05 17:22:50 natlas-server bash[10026]: TypeError: unsupported operand type(s) for -: 'datetime.datetime' and 'NoneType
|
TypeError
|
def deleteScan(scan_id):
delForm = forms.DeleteForm()
if delForm.validate_on_submit():
deleted = current_app.elastic.delete_scan(scan_id)
if deleted in [1, 2]:
flash("Successfully deleted scan %s." % scan_id, "success")
if request.referrer:
if scan_id in request.referrer:
redirectLoc = request.referrer.rsplit(scan_id)[0]
else:
redirectLoc = request.referrer
else:
redirectLoc = url_for("main.browse")
return redirect(redirectLoc)
else:
flash("Could not delete scan %s." % scan_id, "danger")
return redirect(request.referrer or url_for("main.browse"))
else:
flash("Couldn't validate form!")
return redirect(request.referrer)
|
def deleteScan(scan_id):
delForm = forms.DeleteForm()
if delForm.validate_on_submit():
deleted = current_app.elastic.delete_scan(scan_id)
if deleted in [1, 2]:
flash("Successfully deleted scan %s." % scan_id, "success")
if request.referrer:
if scan_id in request.referrer:
redirectLoc = request.referrer.rsplit(scan_id)[0]
else:
redirectLoc = request.referrer
else:
redirectLoc = url_for("main.search")
return redirect(redirectLoc)
else:
flash("Could not delete scan %s." % scan_id, "danger")
return redirect(request.referrer or url_for("main.search"))
else:
flash("Couldn't validate form!")
return redirect(request.referrer)
|
https://github.com/natlas/natlas/issues/221
|
Nov 05 17:22:50 natlas-server bash[10026]: [2019-11-05 17:22:50,681] ERROR in app: Exception on /api/getwork [GET]
Nov 05 17:22:50 natlas-server bash[10026]: Traceback (most recent call last):
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
Nov 05 17:22:50 natlas-server bash[10026]: response = self.full_dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1815, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.handle_user_exception(e)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1718, in handle_user_exception
Nov 05 17:22:50 natlas-server bash[10026]: reraise(exc_type, exc_value, tb)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/_compat.py", line 35, in reraise
Nov 05 17:22:50 natlas-server bash[10026]: raise value
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1813, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1799, in dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: return self.view_functions[rule.endpoint](**req.view_args)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 41, in decorated_function
Nov 05 17:22:50 natlas-server bash[10026]: return f(*args, **kwargs)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/api/routes.py", line 70, in getwork
Nov 05 17:22:50 natlas-server bash[10026]: count, context = current_app.elastic.gethost_scan_id(rand)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 238, in gethost_scan_id
Nov 05 17:22:50 natlas-server bash[10026]: if not self.checkStatus():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 60, in checkStatus
Nov 05 17:22:50 natlas-server bash[10026]: if not self.attemptReconnect():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 43, in attemptReconnect
Nov 05 17:22:50 natlas-server bash[10026]: delta = now - self.lastReconnectAttempt
Nov 05 17:22:50 natlas-server bash[10026]: TypeError: unsupported operand type(s) for -: 'datetime.datetime' and 'NoneType
|
TypeError
|
def deleteHost(ip):
delForm = forms.DeleteForm()
if delForm.validate_on_submit():
deleted = current_app.elastic.delete_host(ip)
if deleted > 0:
flash(
f"Successfully deleted {deleted - 1 if deleted > 1 else deleted} scans for {ip}",
"success",
)
return redirect(url_for("main.browse"))
else:
flash(f"Couldn't delete host: {ip}", "danger")
else:
flash("Couldn't validate form!")
return redirect(request.referrer)
|
def deleteHost(ip):
delForm = forms.DeleteForm()
if delForm.validate_on_submit():
deleted = current_app.elastic.delete_host(ip)
if deleted > 0:
flash("Successfully deleted host %s" % ip, "success")
return redirect(url_for("main.search"))
else:
flash("Couldn't delete host: %s" % ip, "danger")
else:
flash("Couldn't validate form!")
return redirect(request.referrer)
|
https://github.com/natlas/natlas/issues/221
|
Nov 05 17:22:50 natlas-server bash[10026]: [2019-11-05 17:22:50,681] ERROR in app: Exception on /api/getwork [GET]
Nov 05 17:22:50 natlas-server bash[10026]: Traceback (most recent call last):
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
Nov 05 17:22:50 natlas-server bash[10026]: response = self.full_dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1815, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.handle_user_exception(e)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1718, in handle_user_exception
Nov 05 17:22:50 natlas-server bash[10026]: reraise(exc_type, exc_value, tb)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/_compat.py", line 35, in reraise
Nov 05 17:22:50 natlas-server bash[10026]: raise value
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1813, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1799, in dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: return self.view_functions[rule.endpoint](**req.view_args)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 41, in decorated_function
Nov 05 17:22:50 natlas-server bash[10026]: return f(*args, **kwargs)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/api/routes.py", line 70, in getwork
Nov 05 17:22:50 natlas-server bash[10026]: count, context = current_app.elastic.gethost_scan_id(rand)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 238, in gethost_scan_id
Nov 05 17:22:50 natlas-server bash[10026]: if not self.checkStatus():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 60, in checkStatus
Nov 05 17:22:50 natlas-server bash[10026]: if not self.attemptReconnect():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 43, in attemptReconnect
Nov 05 17:22:50 natlas-server bash[10026]: delta = now - self.lastReconnectAttempt
Nov 05 17:22:50 natlas-server bash[10026]: TypeError: unsupported operand type(s) for -: 'datetime.datetime' and 'NoneType
|
TypeError
|
def get_unique_scan_id():
scan_id = ""
while scan_id == "":
rand = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(16)
)
count, context = current_app.elastic.get_host_by_scan_id(rand)
if count == 0:
scan_id = rand
return scan_id
|
def get_unique_scan_id():
scan_id = ""
while scan_id == "":
rand = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(16)
)
count, context = current_app.elastic.gethost_scan_id(rand)
if count == 0:
scan_id = rand
return scan_id
|
https://github.com/natlas/natlas/issues/221
|
Nov 05 17:22:50 natlas-server bash[10026]: [2019-11-05 17:22:50,681] ERROR in app: Exception on /api/getwork [GET]
Nov 05 17:22:50 natlas-server bash[10026]: Traceback (most recent call last):
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
Nov 05 17:22:50 natlas-server bash[10026]: response = self.full_dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1815, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.handle_user_exception(e)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1718, in handle_user_exception
Nov 05 17:22:50 natlas-server bash[10026]: reraise(exc_type, exc_value, tb)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/_compat.py", line 35, in reraise
Nov 05 17:22:50 natlas-server bash[10026]: raise value
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1813, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1799, in dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: return self.view_functions[rule.endpoint](**req.view_args)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 41, in decorated_function
Nov 05 17:22:50 natlas-server bash[10026]: return f(*args, **kwargs)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/api/routes.py", line 70, in getwork
Nov 05 17:22:50 natlas-server bash[10026]: count, context = current_app.elastic.gethost_scan_id(rand)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 238, in gethost_scan_id
Nov 05 17:22:50 natlas-server bash[10026]: if not self.checkStatus():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 60, in checkStatus
Nov 05 17:22:50 natlas-server bash[10026]: if not self.attemptReconnect():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 43, in attemptReconnect
Nov 05 17:22:50 natlas-server bash[10026]: delta = now - self.lastReconnectAttempt
Nov 05 17:22:50 natlas-server bash[10026]: TypeError: unsupported operand type(s) for -: 'datetime.datetime' and 'NoneType
|
TypeError
|
def submit():
status_code = None
response_body = None
data = request.get_json()
newhost = {}
newhost = json.loads(data)
newhost["ctime"] = dt.now(tz.utc)
if newhost["scan_reason"] == "requested":
mark_scan_completed(newhost["ip"], newhost["scan_id"])
try:
nmap = NmapParser.parse(newhost["xml_data"])
# If there's more or less than 1 host in the xml data, reject it (for now)
if nmap.hosts_total != 1:
status_code = 400
response_body = json.dumps(
{
"status": status_code,
"message": "XML had too many hosts in it",
"retry": False,
}
)
# If it's not an acceptable target, tell the agent it's out of scope
elif not current_app.ScopeManager.isAcceptableTarget(nmap.hosts[0].address):
status_code = 400
response_body = json.dumps(
{
"status": status_code,
"message": "Out of scope: " + nmap.hosts[0].address,
"retry": False,
}
)
# If there's no further processing to do, store the host and prepare the response
elif not newhost["is_up"] or (newhost["is_up"] and newhost["port_count"] == 0):
current_app.elastic.new_result(newhost)
status_code = 200
response_body = json.dumps(
{"status": status_code, "message": "Received: " + newhost["ip"]}
)
except NmapParserException:
status_code = 400
response_body = json.dumps(
{
"status": status_code,
"message": "Invalid nmap xml data provided",
"retry": False,
}
)
# If status_code and response_body have been set by this point, return a response.
if status_code and response_body:
response = Response(
response=response_body, status=status_code, content_type=json_content
)
return response
if newhost["scan_start"] and newhost["scan_stop"]:
elapsed = dateutil.parser.parse(newhost["scan_stop"]) - dateutil.parser.parse(
newhost["scan_start"]
)
newhost["elapsed"] = elapsed.seconds
newhost["ip"] = nmap.hosts[0].address
if len(nmap.hosts[0].hostnames) > 0:
newhost["hostname"] = nmap.hosts[0].hostnames[0]
tmpports = []
newhost["ports"] = []
for port in nmap.hosts[0].get_open_ports():
tmpports.append(str(port[0]))
srv = nmap.hosts[0].get_service(port[0], port[1])
portinfo = srv.get_dict()
portinfo["service"] = srv.service_dict
portinfo["scripts"] = []
for script in srv.scripts_results:
scriptsave = {"id": script["id"], "output": script["output"]}
portinfo["scripts"].append(scriptsave)
if script["id"] == "ssl-cert":
portinfo["ssl"] = parse_ssl_data(script)
newhost["ports"].append(portinfo)
newhost["port_str"] = ", ".join(tmpports)
if "screenshots" in newhost and newhost["screenshots"]:
newhost["screenshots"], newhost["num_screenshots"] = process_screenshots(
newhost["screenshots"]
)
if len(newhost["ports"]) == 0:
status_code = 200
response_body = json.dumps(
{
"status": status_code,
"message": "Expected open ports but didn't find any for %s"
% newhost["ip"],
}
)
elif len(newhost["ports"]) > 500:
status_code = 200
response_body = json.dumps(
{
"status": status_code,
"message": "More than 500 ports found, throwing data out",
}
)
else:
status_code = 200
current_app.elastic.new_result(newhost)
response_body = json.dumps(
{
"status": status_code,
"message": "Received %s ports for %s"
% (len(newhost["ports"]), newhost["ip"]),
}
)
response = Response(
response=response_body, status=status_code, content_type=json_content
)
return response
|
def submit():
status_code = None
response_body = None
data = request.get_json()
newhost = {}
newhost = json.loads(data)
newhost["ctime"] = dt.now(tz.utc)
if newhost["scan_reason"] == "requested":
mark_scan_completed(newhost["ip"], newhost["scan_id"])
try:
nmap = NmapParser.parse(newhost["xml_data"])
# If there's more or less than 1 host in the xml data, reject it (for now)
if nmap.hosts_total != 1:
status_code = 400
response_body = json.dumps(
{
"status": status_code,
"message": "XML had too many hosts in it",
"retry": False,
}
)
# If it's not an acceptable target, tell the agent it's out of scope
elif not current_app.ScopeManager.isAcceptableTarget(nmap.hosts[0].address):
status_code = 400
response_body = json.dumps(
{
"status": status_code,
"message": "Out of scope: " + nmap.hosts[0].address,
"retry": False,
}
)
# If there's no further processing to do, store the host and prepare the response
elif not newhost["is_up"] or (newhost["is_up"] and newhost["port_count"] == 0):
current_app.elastic.newhost(newhost)
status_code = 200
response_body = json.dumps(
{"status": status_code, "message": "Received: " + newhost["ip"]}
)
except NmapParserException:
status_code = 400
response_body = json.dumps(
{
"status": status_code,
"message": "Invalid nmap xml data provided",
"retry": False,
}
)
# If status_code and response_body have been set by this point, return a response.
if status_code and response_body:
response = Response(
response=response_body, status=status_code, content_type=json_content
)
return response
if newhost["scan_start"] and newhost["scan_stop"]:
elapsed = dateutil.parser.parse(newhost["scan_stop"]) - dateutil.parser.parse(
newhost["scan_start"]
)
newhost["elapsed"] = elapsed.seconds
newhost["ip"] = nmap.hosts[0].address
if len(nmap.hosts[0].hostnames) > 0:
newhost["hostname"] = nmap.hosts[0].hostnames[0]
tmpports = []
newhost["ports"] = []
for port in nmap.hosts[0].get_open_ports():
tmpports.append(str(port[0]))
srv = nmap.hosts[0].get_service(port[0], port[1])
portinfo = srv.get_dict()
portinfo["service"] = srv.service_dict
portinfo["scripts"] = []
for script in srv.scripts_results:
scriptsave = {"id": script["id"], "output": script["output"]}
portinfo["scripts"].append(scriptsave)
if script["id"] == "ssl-cert":
portinfo["ssl"] = parse_ssl_data(script)
newhost["ports"].append(portinfo)
newhost["port_str"] = ", ".join(tmpports)
if "screenshots" in newhost and newhost["screenshots"]:
newhost["screenshots"], newhost["num_screenshots"] = process_screenshots(
newhost["screenshots"]
)
if len(newhost["ports"]) == 0:
status_code = 200
response_body = json.dumps(
{
"status": status_code,
"message": "Expected open ports but didn't find any for %s"
% newhost["ip"],
}
)
elif len(newhost["ports"]) > 500:
status_code = 200
response_body = json.dumps(
{
"status": status_code,
"message": "More than 500 ports found, throwing data out",
}
)
else:
status_code = 200
current_app.elastic.newhost(newhost)
response_body = json.dumps(
{
"status": status_code,
"message": "Received %s ports for %s"
% (len(newhost["ports"]), newhost["ip"]),
}
)
response = Response(
response=response_body, status=status_code, content_type=json_content
)
return response
|
https://github.com/natlas/natlas/issues/221
|
Nov 05 17:22:50 natlas-server bash[10026]: [2019-11-05 17:22:50,681] ERROR in app: Exception on /api/getwork [GET]
Nov 05 17:22:50 natlas-server bash[10026]: Traceback (most recent call last):
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
Nov 05 17:22:50 natlas-server bash[10026]: response = self.full_dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1815, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.handle_user_exception(e)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1718, in handle_user_exception
Nov 05 17:22:50 natlas-server bash[10026]: reraise(exc_type, exc_value, tb)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/_compat.py", line 35, in reraise
Nov 05 17:22:50 natlas-server bash[10026]: raise value
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1813, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1799, in dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: return self.view_functions[rule.endpoint](**req.view_args)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 41, in decorated_function
Nov 05 17:22:50 natlas-server bash[10026]: return f(*args, **kwargs)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/api/routes.py", line 70, in getwork
Nov 05 17:22:50 natlas-server bash[10026]: count, context = current_app.elastic.gethost_scan_id(rand)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 238, in gethost_scan_id
Nov 05 17:22:50 natlas-server bash[10026]: if not self.checkStatus():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 60, in checkStatus
Nov 05 17:22:50 natlas-server bash[10026]: if not self.attemptReconnect():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 43, in attemptReconnect
Nov 05 17:22:50 natlas-server bash[10026]: delta = now - self.lastReconnectAttempt
Nov 05 17:22:50 natlas-server bash[10026]: TypeError: unsupported operand type(s) for -: 'datetime.datetime' and 'NoneType
|
TypeError
|
def host_history(ip):
info, context = hostinfo(ip)
page = int(request.args.get("p", 1))
searchOffset = current_user.results_per_page * (page - 1)
delHostForm = DeleteForm()
rescanForm = RescanForm()
count, context = current_app.elastic.get_host_history(
ip, current_user.results_per_page, searchOffset
)
if count == 0:
abort(404)
next_url = (
url_for("host.host_history", ip=ip, p=page + 1)
if count > page * current_user.results_per_page
else None
)
prev_url = url_for("host.host_history", ip=ip, p=page - 1) if page > 1 else None
# TODO Hardcoding the version here is bad. Revisit this.
return render_template(
"host/versions/0.6.5/history.html",
ip=ip,
info=info,
page=page,
numresults=count,
hosts=context,
next_url=next_url,
prev_url=prev_url,
delHostForm=delHostForm,
rescanForm=rescanForm,
)
|
def host_history(ip):
info, context = hostinfo(ip)
page = int(request.args.get("p", 1))
searchOffset = current_user.results_per_page * (page - 1)
delHostForm = DeleteForm()
rescanForm = RescanForm()
count, context = current_app.elastic.gethost_history(
ip, current_user.results_per_page, searchOffset
)
if count == 0:
abort(404)
next_url = (
url_for("host.host_history", ip=ip, p=page + 1)
if count > page * current_user.results_per_page
else None
)
prev_url = url_for("host.host_history", ip=ip, p=page - 1) if page > 1 else None
# TODO Hardcoding the version here is bad. Revisit this.
return render_template(
"host/versions/0.6.5/history.html",
ip=ip,
info=info,
page=page,
numresults=count,
hosts=context,
next_url=next_url,
prev_url=prev_url,
delHostForm=delHostForm,
rescanForm=rescanForm,
)
|
https://github.com/natlas/natlas/issues/221
|
Nov 05 17:22:50 natlas-server bash[10026]: [2019-11-05 17:22:50,681] ERROR in app: Exception on /api/getwork [GET]
Nov 05 17:22:50 natlas-server bash[10026]: Traceback (most recent call last):
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
Nov 05 17:22:50 natlas-server bash[10026]: response = self.full_dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1815, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.handle_user_exception(e)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1718, in handle_user_exception
Nov 05 17:22:50 natlas-server bash[10026]: reraise(exc_type, exc_value, tb)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/_compat.py", line 35, in reraise
Nov 05 17:22:50 natlas-server bash[10026]: raise value
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1813, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1799, in dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: return self.view_functions[rule.endpoint](**req.view_args)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 41, in decorated_function
Nov 05 17:22:50 natlas-server bash[10026]: return f(*args, **kwargs)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/api/routes.py", line 70, in getwork
Nov 05 17:22:50 natlas-server bash[10026]: count, context = current_app.elastic.gethost_scan_id(rand)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 238, in gethost_scan_id
Nov 05 17:22:50 natlas-server bash[10026]: if not self.checkStatus():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 60, in checkStatus
Nov 05 17:22:50 natlas-server bash[10026]: if not self.attemptReconnect():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 43, in attemptReconnect
Nov 05 17:22:50 natlas-server bash[10026]: delta = now - self.lastReconnectAttempt
Nov 05 17:22:50 natlas-server bash[10026]: TypeError: unsupported operand type(s) for -: 'datetime.datetime' and 'NoneType
|
TypeError
|
def host_historical_result(ip, scan_id):
delForm = DeleteForm()
delHostForm = DeleteForm()
rescanForm = RescanForm()
info, context = hostinfo(ip)
count, context = current_app.elastic.get_host_by_scan_id(scan_id)
version = determine_data_version(context)
template_str = f"host/versions/{version}/summary.html"
return render_template(
template_str,
host=context,
info=info,
**context,
delForm=delForm,
delHostForm=delHostForm,
rescanForm=rescanForm,
)
|
def host_historical_result(ip, scan_id):
delForm = DeleteForm()
delHostForm = DeleteForm()
rescanForm = RescanForm()
info, context = hostinfo(ip)
count, context = current_app.elastic.gethost_scan_id(scan_id)
version = determine_data_version(context)
template_str = f"host/versions/{version}/summary.html"
return render_template(
template_str,
host=context,
info=info,
**context,
delForm=delForm,
delHostForm=delHostForm,
rescanForm=rescanForm,
)
|
https://github.com/natlas/natlas/issues/221
|
Nov 05 17:22:50 natlas-server bash[10026]: [2019-11-05 17:22:50,681] ERROR in app: Exception on /api/getwork [GET]
Nov 05 17:22:50 natlas-server bash[10026]: Traceback (most recent call last):
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
Nov 05 17:22:50 natlas-server bash[10026]: response = self.full_dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1815, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.handle_user_exception(e)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1718, in handle_user_exception
Nov 05 17:22:50 natlas-server bash[10026]: reraise(exc_type, exc_value, tb)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/_compat.py", line 35, in reraise
Nov 05 17:22:50 natlas-server bash[10026]: raise value
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1813, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1799, in dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: return self.view_functions[rule.endpoint](**req.view_args)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 41, in decorated_function
Nov 05 17:22:50 natlas-server bash[10026]: return f(*args, **kwargs)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/api/routes.py", line 70, in getwork
Nov 05 17:22:50 natlas-server bash[10026]: count, context = current_app.elastic.gethost_scan_id(rand)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 238, in gethost_scan_id
Nov 05 17:22:50 natlas-server bash[10026]: if not self.checkStatus():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 60, in checkStatus
Nov 05 17:22:50 natlas-server bash[10026]: if not self.attemptReconnect():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 43, in attemptReconnect
Nov 05 17:22:50 natlas-server bash[10026]: delta = now - self.lastReconnectAttempt
Nov 05 17:22:50 natlas-server bash[10026]: TypeError: unsupported operand type(s) for -: 'datetime.datetime' and 'NoneType
|
TypeError
|
def export_scan(ip, scan_id, ext):
if ext not in ["xml", "nmap", "gnmap", "json"]:
abort(404)
export_field = f"{ext}_data"
if ext == "json":
mime = "application/json"
else:
mime = "text/plain"
count, context = current_app.elastic.get_host_by_scan_id(scan_id)
if ext == "json" and count > 0:
return Response(json.dumps(context), mimetype=mime)
elif count > 0 and export_field in context:
return Response(context[export_field], mimetype=mime)
else:
abort(404)
|
def export_scan(ip, scan_id, ext):
if ext not in ["xml", "nmap", "gnmap", "json"]:
abort(404)
export_field = f"{ext}_data"
if ext == "json":
mime = "application/json"
else:
mime = "text/plain"
count, context = current_app.elastic.gethost_scan_id(scan_id)
if ext == "json" and count > 0:
return Response(json.dumps(context), mimetype=mime)
elif count > 0 and export_field in context:
return Response(context[export_field], mimetype=mime)
else:
abort(404)
|
https://github.com/natlas/natlas/issues/221
|
Nov 05 17:22:50 natlas-server bash[10026]: [2019-11-05 17:22:50,681] ERROR in app: Exception on /api/getwork [GET]
Nov 05 17:22:50 natlas-server bash[10026]: Traceback (most recent call last):
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
Nov 05 17:22:50 natlas-server bash[10026]: response = self.full_dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1815, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.handle_user_exception(e)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1718, in handle_user_exception
Nov 05 17:22:50 natlas-server bash[10026]: reraise(exc_type, exc_value, tb)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/_compat.py", line 35, in reraise
Nov 05 17:22:50 natlas-server bash[10026]: raise value
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1813, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1799, in dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: return self.view_functions[rule.endpoint](**req.view_args)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 41, in decorated_function
Nov 05 17:22:50 natlas-server bash[10026]: return f(*args, **kwargs)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/api/routes.py", line 70, in getwork
Nov 05 17:22:50 natlas-server bash[10026]: count, context = current_app.elastic.gethost_scan_id(rand)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 238, in gethost_scan_id
Nov 05 17:22:50 natlas-server bash[10026]: if not self.checkStatus():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 60, in checkStatus
Nov 05 17:22:50 natlas-server bash[10026]: if not self.attemptReconnect():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 43, in attemptReconnect
Nov 05 17:22:50 natlas-server bash[10026]: delta = now - self.lastReconnectAttempt
Nov 05 17:22:50 natlas-server bash[10026]: TypeError: unsupported operand type(s) for -: 'datetime.datetime' and 'NoneType
|
TypeError
|
def random_host():
random_host = current_app.elastic.random_host()
# This would most likely occur when there are no hosts up in the index, so just throw a 404
if not random_host:
abort(404)
ip = random_host["ip"]
info, context = hostinfo(ip)
delForm = DeleteForm()
delHostForm = DeleteForm()
rescanForm = RescanForm()
version = determine_data_version(context)
template_str = f"host/versions/{version}/summary.html"
return render_template(
template_str,
**context,
host=context,
info=info,
delForm=delForm,
delHostForm=delHostForm,
rescanForm=rescanForm,
)
|
def random_host():
random_host = current_app.elastic.random_host()
if not random_host:
abort(404)
ip = random_host["hits"]["hits"][0]["_source"]["ip"]
info, context = hostinfo(ip)
delForm = DeleteForm()
delHostForm = DeleteForm()
rescanForm = RescanForm()
version = determine_data_version(context)
template_str = f"host/versions/{version}/summary.html"
return render_template(
template_str,
**context,
host=context,
info=info,
delForm=delForm,
delHostForm=delHostForm,
rescanForm=rescanForm,
)
|
https://github.com/natlas/natlas/issues/221
|
Nov 05 17:22:50 natlas-server bash[10026]: [2019-11-05 17:22:50,681] ERROR in app: Exception on /api/getwork [GET]
Nov 05 17:22:50 natlas-server bash[10026]: Traceback (most recent call last):
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
Nov 05 17:22:50 natlas-server bash[10026]: response = self.full_dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1815, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.handle_user_exception(e)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1718, in handle_user_exception
Nov 05 17:22:50 natlas-server bash[10026]: reraise(exc_type, exc_value, tb)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/_compat.py", line 35, in reraise
Nov 05 17:22:50 natlas-server bash[10026]: raise value
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1813, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1799, in dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: return self.view_functions[rule.endpoint](**req.view_args)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 41, in decorated_function
Nov 05 17:22:50 natlas-server bash[10026]: return f(*args, **kwargs)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/api/routes.py", line 70, in getwork
Nov 05 17:22:50 natlas-server bash[10026]: count, context = current_app.elastic.gethost_scan_id(rand)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 238, in gethost_scan_id
Nov 05 17:22:50 natlas-server bash[10026]: if not self.checkStatus():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 60, in checkStatus
Nov 05 17:22:50 natlas-server bash[10026]: if not self.attemptReconnect():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 43, in attemptReconnect
Nov 05 17:22:50 natlas-server bash[10026]: delta = now - self.lastReconnectAttempt
Nov 05 17:22:50 natlas-server bash[10026]: TypeError: unsupported operand type(s) for -: 'datetime.datetime' and 'NoneType
|
TypeError
|
def hostinfo(ip):
hostinfo = {}
count, context = current_app.elastic.get_host(ip)
if count == 0:
return abort(404)
hostinfo["history"] = count
screenshot_count = current_app.elastic.count_host_screenshots(ip)
hostinfo["screenshot_count"] = screenshot_count
screenshots = 0
screenshotTypes = [
"screenshots",
"headshot",
"vncheadshot",
"httpheadshot",
"httpsheadshot",
]
for hs in screenshotTypes:
if context.get(hs):
if hs == "screenshots":
# 0.6.5 iterating screenshots instead of screenshot types
for item in context.get(hs):
screenshots += 1
else:
screenshots += 1
hostinfo["screenshots"] = screenshots
if context.get("hostname"):
hostinfo["hostname"] = context.get("hostname")
return hostinfo, context
|
def hostinfo(ip):
hostinfo = {}
count, context = current_app.elastic.gethost(ip)
if count == 0:
return abort(404)
hostinfo["history"] = count
screenshot_count = current_app.elastic.count_host_screenshots(ip)
hostinfo["screenshot_count"] = screenshot_count
screenshots = 0
screenshotTypes = [
"screenshots",
"headshot",
"vncheadshot",
"httpheadshot",
"httpsheadshot",
]
for hs in screenshotTypes:
if context.get(hs):
if hs == "screenshots":
# 0.6.5 iterating screenshots instead of screenshot types
for item in context.get(hs):
screenshots += 1
else:
screenshots += 1
hostinfo["screenshots"] = screenshots
if context.get("hostname"):
hostinfo["hostname"] = context.get("hostname")
return hostinfo, context
|
https://github.com/natlas/natlas/issues/221
|
Nov 05 17:22:50 natlas-server bash[10026]: [2019-11-05 17:22:50,681] ERROR in app: Exception on /api/getwork [GET]
Nov 05 17:22:50 natlas-server bash[10026]: Traceback (most recent call last):
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
Nov 05 17:22:50 natlas-server bash[10026]: response = self.full_dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1815, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.handle_user_exception(e)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1718, in handle_user_exception
Nov 05 17:22:50 natlas-server bash[10026]: reraise(exc_type, exc_value, tb)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/_compat.py", line 35, in reraise
Nov 05 17:22:50 natlas-server bash[10026]: raise value
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1813, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1799, in dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: return self.view_functions[rule.endpoint](**req.view_args)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 41, in decorated_function
Nov 05 17:22:50 natlas-server bash[10026]: return f(*args, **kwargs)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/api/routes.py", line 70, in getwork
Nov 05 17:22:50 natlas-server bash[10026]: count, context = current_app.elastic.gethost_scan_id(rand)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 238, in gethost_scan_id
Nov 05 17:22:50 natlas-server bash[10026]: if not self.checkStatus():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 60, in checkStatus
Nov 05 17:22:50 natlas-server bash[10026]: if not self.attemptReconnect():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 43, in attemptReconnect
Nov 05 17:22:50 natlas-server bash[10026]: delta = now - self.lastReconnectAttempt
Nov 05 17:22:50 natlas-server bash[10026]: TypeError: unsupported operand type(s) for -: 'datetime.datetime' and 'NoneType
|
TypeError
|
def index():
return redirect(url_for("main.browse"))
|
def index():
return redirect(url_for("main.search"))
|
https://github.com/natlas/natlas/issues/221
|
Nov 05 17:22:50 natlas-server bash[10026]: [2019-11-05 17:22:50,681] ERROR in app: Exception on /api/getwork [GET]
Nov 05 17:22:50 natlas-server bash[10026]: Traceback (most recent call last):
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
Nov 05 17:22:50 natlas-server bash[10026]: response = self.full_dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1815, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.handle_user_exception(e)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1718, in handle_user_exception
Nov 05 17:22:50 natlas-server bash[10026]: reraise(exc_type, exc_value, tb)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/_compat.py", line 35, in reraise
Nov 05 17:22:50 natlas-server bash[10026]: raise value
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1813, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1799, in dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: return self.view_functions[rule.endpoint](**req.view_args)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 41, in decorated_function
Nov 05 17:22:50 natlas-server bash[10026]: return f(*args, **kwargs)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/api/routes.py", line 70, in getwork
Nov 05 17:22:50 natlas-server bash[10026]: count, context = current_app.elastic.gethost_scan_id(rand)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 238, in gethost_scan_id
Nov 05 17:22:50 natlas-server bash[10026]: if not self.checkStatus():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 60, in checkStatus
Nov 05 17:22:50 natlas-server bash[10026]: if not self.attemptReconnect():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 43, in attemptReconnect
Nov 05 17:22:50 natlas-server bash[10026]: delta = now - self.lastReconnectAttempt
Nov 05 17:22:50 natlas-server bash[10026]: TypeError: unsupported operand type(s) for -: 'datetime.datetime' and 'NoneType
|
TypeError
|
def search():
"""Return search results for a given query"""
query = request.args.get("query", "")
page = int(request.args.get("page", 1))
format = request.args.get("format", "")
scan_ids = request.args.get("includeScanIDs", "")
includeHistory = request.args.get("includeHistory", False)
results_per_page, search_offset = results_offset(page)
searchIndex = "nmap_history" if includeHistory else "nmap"
count, context = current_app.elastic.search(
results_per_page, search_offset, query=query, searchIndex=searchIndex
)
totalHosts = current_app.elastic.total_hosts()
if includeHistory:
next_url, prev_url = build_pagination_urls(
"main.search", page, count, query=query, includeHistory=includeHistory
)
else:
next_url, prev_url = build_pagination_urls(
"main.search", page, count, query=query
)
# what kind of output are we looking for?
if format == "hostlist":
hostlist = []
for host in context:
if scan_ids:
hostlist.append(str(host["ip"]) + "," + str(host["scan_id"]))
else:
hostlist.append(str(host["ip"]))
return Response("\n".join(hostlist), mimetype="text/plain")
else:
return render_template(
"main/search.html",
query=query,
numresults=count,
totalHosts=totalHosts,
page=page,
hosts=context,
next_url=next_url,
prev_url=prev_url,
)
|
def search():
query = request.args.get("query", "")
page = int(request.args.get("page", 1))
format = request.args.get("format", "")
scan_ids = request.args.get("includeScanIDs", "")
includeHistory = request.args.get("includeHistory", False)
results_per_page = current_user.results_per_page
if includeHistory:
searchIndex = "nmap_history"
else:
searchIndex = "nmap"
searchOffset = results_per_page * (page - 1)
count, context = current_app.elastic.search(
query, results_per_page, searchOffset, searchIndex=searchIndex
)
totalHosts = current_app.elastic.totalHosts()
if includeHistory:
next_url = (
url_for(
"main.search", query=query, page=page + 1, includeHistory=includeHistory
)
if count > page * results_per_page
else None
)
prev_url = (
url_for(
"main.search", query=query, page=page - 1, includeHistory=includeHistory
)
if page > 1
else None
)
else:
next_url = (
url_for("main.search", query=query, page=page + 1)
if count > page * results_per_page
else None
)
prev_url = (
url_for("main.search", query=query, page=page - 1) if page > 1 else None
)
# what kind of output are we looking for?
if format == "hostlist":
hostlist = []
for host in context:
if scan_ids:
hostlist.append(str(host["ip"]) + "," + str(host["scan_id"]))
else:
hostlist.append(str(host["ip"]))
return Response("\n".join(hostlist), mimetype="text/plain")
else:
return render_template(
"search.html",
query=query,
numresults=count,
totalHosts=totalHosts,
page=page,
hosts=context,
next_url=next_url,
prev_url=prev_url,
)
|
https://github.com/natlas/natlas/issues/221
|
Nov 05 17:22:50 natlas-server bash[10026]: [2019-11-05 17:22:50,681] ERROR in app: Exception on /api/getwork [GET]
Nov 05 17:22:50 natlas-server bash[10026]: Traceback (most recent call last):
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
Nov 05 17:22:50 natlas-server bash[10026]: response = self.full_dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1815, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.handle_user_exception(e)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1718, in handle_user_exception
Nov 05 17:22:50 natlas-server bash[10026]: reraise(exc_type, exc_value, tb)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/_compat.py", line 35, in reraise
Nov 05 17:22:50 natlas-server bash[10026]: raise value
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1813, in full_dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: rv = self.dispatch_request()
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/venv/lib/python3.6/site-packages/flask/app.py", line 1799, in dispatch_request
Nov 05 17:22:50 natlas-server bash[10026]: return self.view_functions[rule.endpoint](**req.view_args)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/auth/wrappers.py", line 41, in decorated_function
Nov 05 17:22:50 natlas-server bash[10026]: return f(*args, **kwargs)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/api/routes.py", line 70, in getwork
Nov 05 17:22:50 natlas-server bash[10026]: count, context = current_app.elastic.gethost_scan_id(rand)
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 238, in gethost_scan_id
Nov 05 17:22:50 natlas-server bash[10026]: if not self.checkStatus():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 60, in checkStatus
Nov 05 17:22:50 natlas-server bash[10026]: if not self.attemptReconnect():
Nov 05 17:22:50 natlas-server bash[10026]: File "/opt/natlas/natlas-server/app/elastic.py", line 43, in attemptReconnect
Nov 05 17:22:50 natlas-server bash[10026]: delta = now - self.lastReconnectAttempt
Nov 05 17:22:50 natlas-server bash[10026]: TypeError: unsupported operand type(s) for -: 'datetime.datetime' and 'NoneType
|
TypeError
|
def importScope(line, blacklist):
failedImports = []
alreadyExists = []
successImports = []
if len(line.split(",")) > 1:
ip = line.split(",")[0]
tags = line.split(",")[1:]
else:
ip = line
tags = []
if "/" not in ip:
ip = ip + "/32"
try:
isValid = ipaddress.ip_network(
ip, False
) # False will mask out hostbits for us, ip_network for eventual ipv6 compat
except ValueError as e:
failedImports.append(
line
) # if we hit this ValueError it means that the input couldn't be a CIDR range
return failedImports, alreadyExists, successImports
item = ScopeItem.query.filter_by(
target=isValid.with_prefixlen
).first() # We only want scope items with masked out host bits
if item:
# Add in look for tags and append as necessary
if tags:
ScopeItem.addTags(item, tags)
alreadyExists.append(isValid.with_prefixlen)
return failedImports, alreadyExists, successImports
else:
newTarget = ScopeItem(target=isValid.with_prefixlen, blacklist=blacklist)
db.session.add(newTarget)
if tags:
ScopeItem.addTags(newTarget, tags)
successImports.append(isValid.with_prefixlen)
return failedImports, alreadyExists, successImports
|
def importScope(line, blacklist):
failedImports = []
alreadyExists = []
successImports = []
if len(line.split(",")) > 1:
ip = line.split(",")[0]
tags = line.split(",")[1:]
else:
ip = line
if "/" not in ip:
ip = ip + "/32"
try:
isValid = ipaddress.ip_network(
ip, False
) # False will mask out hostbits for us, ip_network for eventual ipv6 compat
except ValueError as e:
failedImports.append(
line
) # if we hit this ValueError it means that the input couldn't be a CIDR range
return failedImports, alreadyExists, successImports
item = ScopeItem.query.filter_by(
target=isValid.with_prefixlen
).first() # We only want scope items with masked out host bits
if item:
# Add in look for tags and append as necessary
if tags:
ScopeItem.addTags(item, tags)
alreadyExists.append(isValid.with_prefixlen)
return failedImports, alreadyExists, successImports
else:
newTarget = ScopeItem(target=isValid.with_prefixlen, blacklist=blacklist)
db.session.add(newTarget)
if tags:
ScopeItem.addTags(newTarget, tags)
successImports.append(isValid.with_prefixlen)
return failedImports, alreadyExists, successImports
|
https://github.com/natlas/natlas/issues/196
|
python3 add-scope.py --scope scope.txt --verbose
Traceback (most recent call last):
File "add-scope.py", line 58, in <module>
main()
File "add-scope.py", line 53, in main
importScope(args.scope, False, args.verbose)
File "add-scope.py", line 23, in importScope
fail, exist, success = ScopeItem.importScope(line, blacklist)
File "/opt/natlas2/natlas-server/app/models.py", line 156, in importScope
if tags:
UnboundLocalError: local variable 'tags' referenced before assignment
|
UnboundLocalError
|
def scan(target_data=None):
if not validate_target(target_data["target"]):
return ERR["INVALIDTARGET"]
result = {}
# If agent authentication is required, this agent id has to match a server side agent id
# If it's not required and an agent_id is set, we'll use that in scan data
# If it's not required and an agent_id is not set, we'll consider it an anonymous scan.
if config.agent_id:
result["agent"] = config.agent_id
else:
result["agent"] = "anonymous"
result["agent_version"] = config.NATLAS_VERSION
target = target_data["target"]
result["ip"] = target
result["scan_reason"] = target_data["scan_reason"]
result["tags"] = target_data["tags"]
scan_id = target_data["scan_id"]
result["scan_id"] = scan_id
agentConfig = target_data["agent_config"]
result["scan_start"] = datetime.now(timezone.utc).isoformat()
command = [
"nmap",
"-oA",
"data/natlas." + scan_id,
"--servicedb",
"./natlas-services",
]
if agentConfig["versionDetection"]:
command.append("-sV")
if agentConfig["osDetection"]:
command.append("-O")
if agentConfig["enableScripts"] and agentConfig["scripts"]:
command.append("--script")
command.append(agentConfig["scripts"])
if agentConfig["scriptTimeout"]:
command.append("--script-timeout")
command.append(str(agentConfig["scriptTimeout"]))
if agentConfig["hostTimeout"]:
command.append("--host-timeout")
command.append(str(agentConfig["hostTimeout"]))
if agentConfig["osScanLimit"]:
command.append("--osscan-limit")
if agentConfig["noPing"]:
command.append("-Pn")
if agentConfig["onlyOpens"]:
command.append("--open")
if agentConfig["udpScan"]:
command.append("-sUS")
command.append(target_data["target"])
TIMEDOUT = False
process = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
)
try:
out, err = process.communicate(timeout=int(agentConfig["scanTimeout"]))
except:
try:
TIMEDOUT = True
print_err("Scan %s timed out" % scan_id)
process.kill()
except:
pass
if TIMEDOUT:
result["is_up"] = False
result["port_count"] = 0
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
result["timed_out"] = True
cleanup_files(scan_id)
print_info("Submitting scan timeout notice for %s" % result["ip"])
response = backoff_request(
giveup=True,
endpoint="/api/submit",
reqType="POST",
postData=json.dumps(result),
)
return
else:
print_info("Scan %s Complete" % scan_id)
for ext in "nmap", "gnmap", "xml":
try:
result[ext + "_data"] = open("data/natlas." + scan_id + "." + ext).read()
except:
print_err("Couldn't read natlas.%s.%s" % (scan_id, ext))
return ERR["DATANOTFOUND"]
try:
nmap_report = NmapParser.parse(result["xml_data"])
except NmapParserException:
print_err("Couldn't parse natlas.%s.xml" % (scan_id))
return ERR["DATANOTFOUND"]
if nmap_report.hosts_total < 1:
print_err("No hosts found in scan data")
return "[!] No hosts found in scan data"
elif nmap_report.hosts_total > 1:
print_err("Too many hosts found in scan data")
return "[!] Too many hosts found in scan data"
elif nmap_report.hosts_down == 1:
# host is down
result["is_up"] = False
result["port_count"] = 0
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
cleanup_files(scan_id)
print_info("Submitting host down notice for %s" % (result["ip"]))
response = backoff_request(
giveup=True,
endpoint="/api/submit",
reqType="POST",
postData=json.dumps(result),
)
return
elif nmap_report.hosts_up == 1 and len(nmap_report.hosts) == 0:
# host is up but no reportable ports were found
result["is_up"] = True
result["port_count"] = 0
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
cleanup_files(scan_id)
print_info("Submitting %s ports for %s" % (result["port_count"], result["ip"]))
response = backoff_request(
giveup=True,
endpoint="/api/submit",
reqType="POST",
postData=json.dumps(result),
)
return
else:
# host is up and reportable ports were found
result["is_up"] = nmap_report.hosts[0].is_up()
result["port_count"] = len(nmap_report.hosts[0].get_ports())
result["screenshots"] = []
if (
target_data["agent_config"]["webScreenshots"]
and shutil.which("aquatone") is not None
):
targetServices = []
if "80/tcp" in result["nmap_data"]:
targetServices.append("http")
if "443/tcp" in result["nmap_data"]:
targetServices.append("https")
if len(targetServices) > 0:
print_info(
"Attempting to take %s screenshot(s) for %s"
% (", ".join(targetServices).upper(), result["ip"])
)
screenshotutils.runAquatone(target, scan_id, targetServices)
serviceMapping = {"http": 80, "https": 443}
for service in targetServices:
screenshotPath = (
"data/aquatone."
+ scan_id
+ "/screenshots/"
+ service
+ "__"
+ target.replace(".", "_")
+ ".png"
)
if not os.path.isfile(screenshotPath):
continue
result["screenshots"].append(
{
"host": target,
"port": serviceMapping[service],
"service": service.upper(),
"data": str(base64.b64encode(open(screenshotPath, "rb").read()))[
2:-1
],
}
)
print_info(
"%s screenshot acquired for %s" % (service.upper(), result["ip"])
)
if (
target_data["agent_config"]["vncScreenshots"]
and shutil.which("vncsnapshot") is not None
):
if "5900/tcp" in result["nmap_data"]:
print_info("Attempting to take VNC screenshot for %s" % result["ip"])
if screenshotutils.runVNCSnapshot(target, scan_id) is True:
result["screenshots"].append(
{
"host": target,
"port": 5900,
"service": "VNC",
"data": str(
base64.b64encode(
open("data/natlas." + scan_id + ".vnc.jpg", "rb").read()
)
)[2:-1],
}
)
print_info("VNC screenshot acquired for %s" % result["ip"])
else:
print_err("Failed to acquire screenshot for %s" % result["ip"])
# submit result
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
cleanup_files(scan_id)
print_info("Submitting %s ports for %s" % (result["port_count"], result["ip"]))
response = backoff_request(
giveup=True, endpoint="/api/submit", reqType="POST", postData=json.dumps(result)
)
|
def scan(target_data=None):
if not validate_target(target_data["target"]):
return ERR["INVALIDTARGET"]
result = {}
# If agent authentication is required, this agent id has to match a server side agent id
# If it's not required and an agent_id is set, we'll use that in scan data
# If it's not required and an agent_id is not set, we'll consider it an anonymous scan.
if config.agent_id:
result["agent"] = config.agent_id
else:
result["agent"] = "anonymous"
result["agent_version"] = config.NATLAS_VERSION
target = target_data["target"]
result["ip"] = target
result["scan_reason"] = target_data["scan_reason"]
result["tags"] = target_data["tags"]
scan_id = target_data["scan_id"]
result["scan_id"] = scan_id
agentConfig = target_data["agent_config"]
result["scan_start"] = datetime.now(timezone.utc).isoformat()
command = [
"nmap",
"-oA",
"data/natlas." + scan_id,
"--servicedb",
"./natlas-services",
]
if agentConfig["versionDetection"]:
command.append("-sV")
if agentConfig["osDetection"]:
command.append("-O")
if agentConfig["enableScripts"] and agentConfig["scripts"]:
command.append("--script")
command.append(agentConfig["scripts"])
if agentConfig["scriptTimeout"]:
command.append("--script-timeout")
command.append(str(agentConfig["scriptTimeout"]))
if agentConfig["hostTimeout"]:
command.append("--host-timeout")
command.append(str(agentConfig["hostTimeout"]))
if agentConfig["osScanLimit"]:
command.append("--osscan-limit")
if agentConfig["noPing"]:
command.append("-Pn")
if agentConfig["onlyOpens"]:
command.append("--open")
if agentConfig["udpScan"]:
command.append("-sUS")
command.append(target_data["target"])
TIMEDOUT = False
process = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
)
try:
out, err = process.communicate(timeout=int(agentConfig["scanTimeout"]))
except:
try:
TIMEDOUT = True
print_err("Scan %s timed out" % scan_id)
process.kill()
except:
pass
if TIMEDOUT:
result["is_up"] = False
result["port_count"] = 0
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
result["timed_out"] = True
cleanup_files(scan_id)
print_info("Submitting scan timeout notice for %s" % result["ip"])
response = backoff_request(
giveup=True,
endpoint="/api/submit",
reqType="POST",
postData=json.dumps(result),
)
return
else:
print_info("Scan %s Complete" % scan_id)
for ext in "nmap", "gnmap", "xml":
try:
result[ext + "_data"] = open("data/natlas." + scan_id + "." + ext).read()
except:
print_err("Couldn't read natlas.%s.%s" % (scan_id, ext))
return ERR["DATANOTFOUND"]
nmap_report = NmapParser.parse(result["xml_data"])
if nmap_report.hosts_total < 1:
print_err("No hosts found in scan data")
return "[!] No hosts found in scan data"
elif nmap_report.hosts_total > 1:
print_err("Too many hosts found in scan data")
return "[!] Too many hosts found in scan data"
elif nmap_report.hosts_down == 1:
# host is down
result["is_up"] = False
result["port_count"] = 0
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
cleanup_files(scan_id)
print_info("Submitting host down notice for %s" % (result["ip"]))
response = backoff_request(
giveup=True,
endpoint="/api/submit",
reqType="POST",
postData=json.dumps(result),
)
return
elif nmap_report.hosts_up == 1 and len(nmap_report.hosts) == 0:
# host is up but no reportable ports were found
result["is_up"] = True
result["port_count"] = 0
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
cleanup_files(scan_id)
print_info("Submitting %s ports for %s" % (result["port_count"], result["ip"]))
response = backoff_request(
giveup=True,
endpoint="/api/submit",
reqType="POST",
postData=json.dumps(result),
)
return
else:
# host is up and reportable ports were found
result["is_up"] = nmap_report.hosts[0].is_up()
result["port_count"] = len(nmap_report.hosts[0].get_ports())
result["screenshots"] = []
if (
target_data["agent_config"]["webScreenshots"]
and shutil.which("aquatone") is not None
):
targetServices = []
if "80/tcp" in result["nmap_data"]:
targetServices.append("http")
if "443/tcp" in result["nmap_data"]:
targetServices.append("https")
if len(targetServices) > 0:
print_info(
"Attempting to take %s screenshot(s) for %s"
% (", ".join(targetServices).upper(), result["ip"])
)
screenshotutils.runAquatone(target, scan_id, targetServices)
serviceMapping = {"http": 80, "https": 443}
for service in targetServices:
screenshotPath = (
"data/aquatone."
+ scan_id
+ "/screenshots/"
+ service
+ "__"
+ target.replace(".", "_")
+ ".png"
)
if not os.path.isfile(screenshotPath):
continue
result["screenshots"].append(
{
"host": target,
"port": serviceMapping[service],
"service": service.upper(),
"data": str(base64.b64encode(open(screenshotPath, "rb").read()))[
2:-1
],
}
)
print_info(
"%s screenshot acquired for %s" % (service.upper(), result["ip"])
)
if (
target_data["agent_config"]["vncScreenshots"]
and shutil.which("vncsnapshot") is not None
):
if "5900/tcp" in result["nmap_data"]:
print_info("Attempting to take VNC screenshot for %s" % result["ip"])
if screenshotutils.runVNCSnapshot(target, scan_id) is True:
result["screenshots"].append(
{
"host": target,
"port": 5900,
"service": "VNC",
"data": str(
base64.b64encode(
open("data/natlas." + scan_id + ".vnc.jpg", "rb").read()
)
)[2:-1],
}
)
print_info("VNC screenshot acquired for %s" % result["ip"])
else:
print_err("Failed to acquire screenshot for %s" % result["ip"])
# submit result
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
cleanup_files(scan_id)
print_info("Submitting %s ports for %s" % (result["port_count"], result["ip"]))
response = backoff_request(
giveup=True, endpoint="/api/submit", reqType="POST", postData=json.dumps(result)
)
|
https://github.com/natlas/natlas/issues/169
|
[+] Thread-1: Fetching Target from https://natlas.io
[+] Thread-1: [Server] Target: 191.222.148.70
[+] Thread-1: Scan fdrvra799fodfg4e Complete
Exception in thread Thread-1:
Traceback (most recent call last):
File "/opt/natlas/natlas-agent/venv/lib/python3.6/site-packages/libnmap/parser.py", line 90, in _parse_xml
root = ET.fromstring(nmap_data)
File "/usr/lib/python3.6/xml/etree/ElementTree.py", line 1315, in XML
return parser.close()
File "<string>", line None
xml.etree.ElementTree.ParseError: no element found: line 7, column 0
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
File "./natlas-agent.py", line 367, in run
result = scan(target_data)
File "./natlas-agent.py", line 267, in scan
nmap_report = NmapParser.parse(result['xml_data'])
File "/opt/natlas/natlas-agent/venv/lib/python3.6/site-packages/libnmap/parser.py", line 41, in parse
nmapobj = cls._parse_xml(nmap_data, incomplete)
File "/opt/natlas/natlas-agent/venv/lib/python3.6/site-packages/libnmap/parser.py", line 92, in _parse_xml
raise NmapParserException("Wrong XML structure: cannot parse data")
libnmap.parser.NmapParserException: Wrong XML structure: cannot parse data
|
xml.etree.ElementTree.ParseError
|
def cleanup_files(scan_id):
print_info("Cleaning up files for %s" % scan_id)
if os.path.isdir("data/aquatone.%s" % scan_id):
shutil.rmtree("data/aquatone.%s" % scan_id)
for file in glob.glob("data/natlas." + scan_id + ".*"):
try:
os.remove(file)
except:
print_err("Could not remove file %s" % file)
|
def cleanup_files(scan_id):
print_info("Cleaning up files for %s" % scan_id)
for file in glob.glob("data/*." + scan_id + ".*"):
os.remove(file)
|
https://github.com/natlas/natlas/issues/157
|
[!] (3ud7hcwtnz) Killing slacker process
[!] Thread-2: Failed to acquire HTTPS screenshot for 178.128.187.177
[+] Thread-2: Cleaning up files for 3ud7hcwtnz
Exception in thread Thread-2:
Traceback (most recent call last):
File "/usr/lib/python3.7/threading.py", line 917, in _bootstrap_inner
self.run()
File "./natlas-agent.py", line 359, in run
result = scan(target_data)
File "./natlas-agent.py", line 335, in scan
cleanup_files(scan_id)
File "./natlas-agent.py", line 181, in cleanup_files
os.remove(file)
IsADirectoryError: [Errno 21] Is a directory: 'data/aquatone.3ud7hcwtnz.https'
|
IsADirectoryError
|
def scan(target_data=None):
if not validate_target(target_data["target"]):
return ERR["INVALIDTARGET"]
result = {}
# If agent authentication is required, this agent id has to match a server side agent id
# If it's not required and an agent_id is set, we'll use that in scan data
# If it's not required and an agent_id is not set, we'll consider it an anonymous scan.
if config.agent_id:
result["agent"] = config.agent_id
else:
result["agent"] = "anonymous"
result["agent_version"] = config.NATLAS_VERSION
target = target_data["target"]
result["ip"] = target
result["scan_reason"] = target_data["scan_reason"]
result["tags"] = target_data["tags"]
scan_id = target_data["scan_id"]
result["scan_id"] = scan_id
agentConfig = target_data["agent_config"]
result["scan_start"] = datetime.now(timezone.utc).isoformat()
command = [
"nmap",
"-oA",
"data/natlas." + scan_id,
"--servicedb",
"./natlas-services",
]
if agentConfig["versionDetection"]:
command.append("-sV")
if agentConfig["osDetection"]:
command.append("-O")
if agentConfig["enableScripts"] and agentConfig["scripts"]:
command.append("--script")
command.append(agentConfig["scripts"])
if agentConfig["scriptTimeout"]:
command.append("--script-timeout")
command.append(str(agentConfig["scriptTimeout"]))
if agentConfig["hostTimeout"]:
command.append("--host-timeout")
command.append(str(agentConfig["hostTimeout"]))
if agentConfig["osScanLimit"]:
command.append("--osscan-limit")
if agentConfig["noPing"]:
command.append("-Pn")
if agentConfig["onlyOpens"]:
command.append("--open")
command.append(target_data["target"])
TIMEDOUT = False
process = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
)
try:
out, err = process.communicate(timeout=int(agentConfig["scanTimeout"]))
except:
try:
TIMEDOUT = True
print_err("Scan %s timed out" % scan_id)
process.kill()
except:
pass
if TIMEDOUT:
result["is_up"] = False
result["port_count"] = 0
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
result["timed_out"] = True
cleanup_files(scan_id)
print_info("Submitting scan timeout notice for %s" % result["ip"])
response = backoff_request(
giveup=True,
endpoint="/api/submit",
reqType="POST",
postData=json.dumps(result),
)
return
else:
print_info("Scan %s Complete" % scan_id)
for ext in "nmap", "gnmap", "xml":
try:
result[ext + "_data"] = open("data/natlas." + scan_id + "." + ext).read()
except:
print_err("Couldn't read natlas.%s.%s" % (scan_id, ext))
return ERR["DATANOTFOUND"]
nmap_report = NmapParser.parse(result["xml_data"])
if nmap_report.hosts_total < 1:
print_err("No hosts found in scan data")
return "[!] No hosts found in scan data"
elif nmap_report.hosts_total > 1:
print_err("Too many hosts found in scan data")
return "[!] Too many hosts found in scan data"
elif nmap_report.hosts_down == 1:
# host is down
result["is_up"] = False
result["port_count"] = 0
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
cleanup_files(scan_id)
print_info("Submitting host down notice for %s" % (result["ip"]))
response = backoff_request(
giveup=True,
endpoint="/api/submit",
reqType="POST",
postData=json.dumps(result),
)
return
elif nmap_report.hosts_up == 1 and len(nmap_report.hosts) == 0:
# host is up but no reportable ports were found
result["is_up"] = True
result["port_count"] = 0
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
cleanup_files(scan_id)
print_info("Submitting %s ports for %s" % (result["port_count"], result["ip"]))
response = backoff_request(
giveup=True,
endpoint="/api/submit",
reqType="POST",
postData=json.dumps(result),
)
return
else:
# host is up and reportable ports were found
result["is_up"] = nmap_report.hosts[0].is_up()
result["port_count"] = len(nmap_report.hosts[0].get_ports())
result["screenshots"] = []
if (
target_data["agent_config"]["webScreenshots"]
and shutil.which("aquatone") is not None
):
targetServices = []
if "80/tcp" in result["nmap_data"]:
targetServices.append("http")
if "443/tcp" in result["nmap_data"]:
targetServices.append("https")
if len(targetServices) > 0:
print_info(
"Attempting to take %s screenshot(s) for %s"
% (", ".join(targetServices).upper(), result["ip"])
)
screenshotutils.runAquatone(target, scan_id, targetServices)
serviceMapping = {"http": 80, "https": 443}
for service in targetServices:
screenshotPath = (
"data/aquatone."
+ scan_id
+ "/screenshots/"
+ service
+ "__"
+ target.replace(".", "_")
+ ".png"
)
if not os.path.isfile(screenshotPath):
continue
result["screenshots"].append(
{
"host": target,
"port": serviceMapping[service],
"service": service.upper(),
"data": str(base64.b64encode(open(screenshotPath, "rb").read()))[
2:-1
],
}
)
print_info(
"%s screenshot acquired for %s" % (service.upper(), result["ip"])
)
if (
target_data["agent_config"]["vncScreenshots"]
and shutil.which("vncsnapshot") is not None
):
if "5900/tcp" in result["nmap_data"]:
print_info("Attempting to take VNC screenshot for %s" % result["ip"])
if screenshotutils.runVNCSnapshot(target, scan_id) is True:
result["screenshots"].append(
{
"host": target,
"port": 5900,
"service": "VNC",
"data": str(
base64.b64encode(
open("data/natlas." + scan_id + ".vnc.jpg", "rb").read()
)
)[2:-1],
}
)
print_info("VNC screenshot acquired for %s" % result["ip"])
else:
print_err("Failed to acquire screenshot for %s" % result["ip"])
# submit result
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
cleanup_files(scan_id)
print_info("Submitting %s ports for %s" % (result["port_count"], result["ip"]))
response = backoff_request(
giveup=True, endpoint="/api/submit", reqType="POST", postData=json.dumps(result)
)
|
def scan(target_data=None):
if not validate_target(target_data["target"]):
return ERR["INVALIDTARGET"]
result = {}
# If agent authentication is required, this agent id has to match a server side agent id
# If it's not required and an agent_id is set, we'll use that in scan data
# If it's not required and an agent_id is not set, we'll consider it an anonymous scan.
if config.agent_id:
result["agent"] = config.agent_id
else:
result["agent"] = "anonymous"
result["agent_version"] = config.NATLAS_VERSION
target = target_data["target"]
result["ip"] = target
result["scan_reason"] = target_data["scan_reason"]
result["tags"] = target_data["tags"]
scan_id = target_data["scan_id"]
result["scan_id"] = scan_id
agentConfig = target_data["agent_config"]
result["scan_start"] = datetime.now(timezone.utc).isoformat()
command = [
"nmap",
"-oA",
"data/natlas." + scan_id,
"--servicedb",
"./natlas-services",
]
if agentConfig["versionDetection"]:
command.append("-sV")
if agentConfig["osDetection"]:
command.append("-O")
if agentConfig["enableScripts"] and agentConfig["scripts"]:
command.append("--script")
command.append(agentConfig["scripts"])
if agentConfig["scriptTimeout"]:
command.append("--script-timeout")
command.append(str(agentConfig["scriptTimeout"]))
if agentConfig["hostTimeout"]:
command.append("--host-timeout")
command.append(str(agentConfig["hostTimeout"]))
if agentConfig["osScanLimit"]:
command.append("--osscan-limit")
if agentConfig["noPing"]:
command.append("-Pn")
if agentConfig["onlyOpens"]:
command.append("--open")
command.append(target_data["target"])
TIMEDOUT = False
process = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
)
try:
out, err = process.communicate(timeout=int(agentConfig["scanTimeout"]))
except:
try:
TIMEDOUT = True
print_err("Scan %s timed out" % scan_id)
process.kill()
except:
pass
if TIMEDOUT:
result["is_up"] = False
result["port_count"] = 0
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
result["timed_out"] = True
cleanup_files(scan_id)
print_info("Submitting scan timeout notice for %s" % result["ip"])
response = backoff_request(
giveup=True,
endpoint="/api/submit",
reqType="POST",
postData=json.dumps(result),
)
return
else:
print_info("Scan %s Complete" % scan_id)
for ext in "nmap", "gnmap", "xml":
try:
result[ext + "_data"] = open("data/natlas." + scan_id + "." + ext).read()
except:
print_err("Couldn't read natlas.%s.%s" % (scan_id, ext))
return ERR["DATANOTFOUND"]
nmap_report = NmapParser.parse(result["xml_data"])
if nmap_report.hosts_total < 1:
print_err("No hosts found in scan data")
return "[!] No hosts found in scan data"
elif nmap_report.hosts_total > 1:
print_err("Too many hosts found in scan data")
return "[!] Too many hosts found in scan data"
elif nmap_report.hosts_down == 1:
# host is down
result["is_up"] = False
result["port_count"] = 0
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
cleanup_files(scan_id)
print_info("Submitting host down notice for %s" % (result["ip"]))
response = backoff_request(
giveup=True,
endpoint="/api/submit",
reqType="POST",
postData=json.dumps(result),
)
return
elif nmap_report.hosts_up == 1 and len(nmap_report.hosts) == 0:
# host is up but no reportable ports were found
result["is_up"] = True
result["port_count"] = 0
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
cleanup_files(scan_id)
print_info("Submitting %s ports for %s" % (result["port_count"], result["ip"]))
response = backoff_request(
giveup=True,
endpoint="/api/submit",
reqType="POST",
postData=json.dumps(result),
)
return
else:
# host is up and reportable ports were found
result["is_up"] = nmap_report.hosts[0].is_up()
result["port_count"] = len(nmap_report.hosts[0].get_ports())
result["screenshots"] = []
if (
target_data["agent_config"]["webScreenshots"]
and shutil.which("aquatone") is not None
):
targetServices = []
if "80/tcp" in result["nmap_data"]:
targetServices.append("http")
if "443/tcp" in result["nmap_data"]:
targetServices.append("https")
if len(targetServices) > 0:
print_info(
"Attempting to take %s screenshot(s) for %s"
% (", ".join(targetServices).upper(), result["ip"])
)
screenshotutils.runAquatone(target, scan_id, targetServices)
serviceMapping = {"http": 80, "https": 443}
for service in targetServices:
screenshotPath = (
"data/aquatone."
+ scan_id
+ "/screenshots/"
+ service
+ "__"
+ target.replace(".", "_")
+ ".png"
)
if not os.path.isfile(screenshotPath):
continue
result["screenshots"].append(
{
"host": target,
"port": serviceMapping[service],
"service": service.upper(),
"data": str(base64.b64encode(open(screenshotPath, "rb").read()))[
2:-1
],
}
)
print_info(
"%s screenshot acquired for %s" % (service.upper(), result["ip"])
)
shutil.rmtree("data/aquatone.%s" % scan_id)
if (
target_data["agent_config"]["vncScreenshots"]
and shutil.which("vncsnapshot") is not None
):
if "5900/tcp" in result["nmap_data"]:
print_info("Attempting to take VNC screenshot for %s" % result["ip"])
if screenshotutils.runVNCSnapshot(target, scan_id) is True:
result["screenshots"].append(
{
"host": target,
"port": 5900,
"service": "VNC",
"data": str(
base64.b64encode(
open("data/natlas." + scan_id + ".vnc.jpg", "rb").read()
)
)[2:-1],
}
)
os.remove("data/natlas." + scan_id + ".vnc.jpg")
print_info("VNC screenshot acquired for %s" % result["ip"])
else:
print_err("Failed to acquire screenshot for %s" % result["ip"])
# submit result
result["scan_stop"] = datetime.now(timezone.utc).isoformat()
cleanup_files(scan_id)
print_info("Submitting %s ports for %s" % (result["port_count"], result["ip"]))
response = backoff_request(
giveup=True, endpoint="/api/submit", reqType="POST", postData=json.dumps(result)
)
|
https://github.com/natlas/natlas/issues/157
|
[!] (3ud7hcwtnz) Killing slacker process
[!] Thread-2: Failed to acquire HTTPS screenshot for 178.128.187.177
[+] Thread-2: Cleaning up files for 3ud7hcwtnz
Exception in thread Thread-2:
Traceback (most recent call last):
File "/usr/lib/python3.7/threading.py", line 917, in _bootstrap_inner
self.run()
File "./natlas-agent.py", line 359, in run
result = scan(target_data)
File "./natlas-agent.py", line 335, in scan
cleanup_files(scan_id)
File "./natlas-agent.py", line 181, in cleanup_files
os.remove(file)
IsADirectoryError: [Errno 21] Is a directory: 'data/aquatone.3ud7hcwtnz.https'
|
IsADirectoryError
|
def backoff_request(giveup=False, *args, **kwargs):
attempt = 0
result = None
while not result:
result = make_request(*args, **kwargs)
RETRY = False
if not result:
RETRY = True
elif "retry" in result.json() and result.json()["retry"]:
RETRY = True
elif not "retry" in result.json() or not result.json()["retry"]:
return result
if RETRY:
attempt += 1
if giveup and attempt == config.max_retries:
print_err(
"Request to %s failed %s times. Giving up"
% (config.server, config.max_retries)
)
return None
jitter = (
random.randint(0, 1000) / 1000
) # jitter to reduce chance of locking
current_sleep = (
min(config.backoff_max, config.backoff_base * 2**attempt) + jitter
)
print_err(
"Request to %s failed. Waiting %s seconds before retrying."
% (config.server, current_sleep)
)
time.sleep(current_sleep)
return result
|
def backoff_request(giveup=False, *args, **kwargs):
attempt = 0
result = None
while not result:
result = make_request(*args, **kwargs)
RETRY = False
if not result:
RETRY = True
if "retry" in result.json() and result.json()["retry"]:
RETRY = True
elif not "retry" in result.json() or not result.json()["retry"]:
return result
if RETRY:
attempt += 1
if giveup and attempt == config.max_retries:
print_err(
"Request to %s failed %s times. Giving up"
% (config.server, config.max_retries)
)
return None
jitter = (
random.randint(0, 1000) / 1000
) # jitter to reduce chance of locking
current_sleep = (
min(config.backoff_max, config.backoff_base * 2**attempt) + jitter
)
print_err(
"Request to %s failed. Waiting %s seconds before retrying."
% (config.server, current_sleep)
)
time.sleep(current_sleep)
return result
|
https://github.com/natlas/natlas/issues/155
|
[!] Thread-2: Expected 200, received 502
[!] Thread-2: Error: Expecting value: line 1 column 1 (char 0)
Exception in thread Thread-2:
Traceback (most recent call last):
File "/usr/lib/python3.7/threading.py", line 917, in _bootstrap_inner
self.run()
File "./natlas-agent.py", line 359, in run
result = scan(target_data)
File "./natlas-agent.py", line 286, in scan
response = backoff_request(giveup=True, endpoint="/api/submit", reqType="POST", postData=json.dumps(result))
File "./natlas-agent.py", line 117, in backoff_request
if 'retry' in result.json() and result.json()['retry']:
AttributeError: 'bool' object has no attribute 'json'
|
AttributeError
|
def create_data_iters_and_vocabs(
args: argparse.Namespace,
max_seq_len_source: int,
max_seq_len_target: int,
shared_vocab: bool,
resume_training: bool,
output_folder: str,
) -> Tuple[
"data_io.BaseParallelSampleIter",
"data_io.BaseParallelSampleIter",
"data_io.DataConfig",
List[vocab.Vocab],
vocab.Vocab,
]:
"""
Create the data iterators and the vocabularies.
:param args: Arguments as returned by argparse.
:param max_seq_len_source: Source maximum sequence length.
:param max_seq_len_target: Target maximum sequence length.
:param shared_vocab: Whether to create a shared vocabulary.
:param resume_training: Whether to resume training.
:param output_folder: Output folder.
:return: The data iterators (train, validation, config_data) as well as the source and target vocabularies.
"""
num_words_source, num_words_target = args.num_words
num_words_source = num_words_source if num_words_source > 0 else None
num_words_target = num_words_target if num_words_target > 0 else None
word_min_count_source, word_min_count_target = args.word_min_count
batch_num_devices = (
1 if args.use_cpu else sum(-di if di < 0 else 1 for di in args.device_ids)
)
batch_by_words = args.batch_type == C.BATCH_TYPE_WORD
validation_sources = [args.validation_source] + args.validation_source_factors
validation_sources = [str(os.path.abspath(source)) for source in validation_sources]
either_raw_or_prepared_error_msg = (
"Either specify a raw training corpus with %s and %s or a preprocessed corpus "
"with %s."
% (C.TRAINING_ARG_SOURCE, C.TRAINING_ARG_TARGET, C.TRAINING_ARG_PREPARED_DATA)
)
if args.prepared_data is not None:
utils.check_condition(
args.source is None and args.target is None,
either_raw_or_prepared_error_msg,
)
if not resume_training:
utils.check_condition(
args.source_vocab is None and args.target_vocab is None,
"You are using a prepared data folder, which is tied to a vocabulary. "
"To change it you need to rerun data preparation with a different vocabulary.",
)
train_iter, validation_iter, data_config, source_vocabs, target_vocab = (
data_io.get_prepared_data_iters(
prepared_data_dir=args.prepared_data,
validation_sources=validation_sources,
validation_target=str(os.path.abspath(args.validation_target)),
shared_vocab=shared_vocab,
batch_size=args.batch_size,
batch_by_words=batch_by_words,
batch_num_devices=batch_num_devices,
fill_up=args.fill_up,
)
)
check_condition(
len(source_vocabs) == len(args.source_factors_num_embed) + 1,
"Data was prepared with %d source factors, but only provided %d source factor dimensions."
% (len(source_vocabs), len(args.source_factors_num_embed) + 1),
)
if resume_training:
# resuming training. Making sure the vocabs in the model and in the prepared data match up
model_source_vocabs = vocab.load_source_vocabs(output_folder)
for i, (v, mv) in enumerate(zip(source_vocabs, model_source_vocabs)):
utils.check_condition(
vocab.are_identical(v, mv),
"Prepared data and resumed model source vocab %d do not match." % i,
)
model_target_vocab = vocab.load_target_vocab(output_folder)
utils.check_condition(
vocab.are_identical(target_vocab, model_target_vocab),
"Prepared data and resumed model target vocabs do not match.",
)
check_condition(
data_config.num_source_factors == len(validation_sources),
"Training and validation data must have the same number of factors, but found %d and %d."
% (data_config.num_source_factors, len(validation_sources)),
)
return train_iter, validation_iter, data_config, source_vocabs, target_vocab
else:
utils.check_condition(
args.prepared_data is None
and args.source is not None
and args.target is not None,
either_raw_or_prepared_error_msg,
)
if resume_training:
# Load the existing vocabs created when starting the training run.
source_vocabs = vocab.load_source_vocabs(output_folder)
target_vocab = vocab.load_target_vocab(output_folder)
# Recover the vocabulary path from the data info file:
data_info = cast(
data_io.DataInfo, Config.load(os.path.join(output_folder, C.DATA_INFO))
)
source_vocab_paths = data_info.source_vocabs
target_vocab_path = data_info.target_vocab
else:
# Load or create vocabs
source_vocab_paths = [args.source_vocab] + [None] * len(args.source_factors)
target_vocab_path = args.target_vocab
source_vocabs, target_vocab = vocab.load_or_create_vocabs(
source_paths=[args.source] + args.source_factors,
target_path=args.target,
source_vocab_paths=source_vocab_paths,
target_vocab_path=target_vocab_path,
shared_vocab=shared_vocab,
num_words_source=num_words_source,
num_words_target=num_words_target,
word_min_count_source=word_min_count_source,
word_min_count_target=word_min_count_target,
pad_to_multiple_of=args.pad_vocab_to_multiple_of,
)
check_condition(
len(args.source_factors) == len(args.source_factors_num_embed),
"Number of source factor data (%d) differs from provided source factor dimensions (%d)"
% (len(args.source_factors), len(args.source_factors_num_embed)),
)
sources = [args.source] + args.source_factors
sources = [str(os.path.abspath(source)) for source in sources]
check_condition(
len(sources) == len(validation_sources),
"Training and validation data must have the same number of factors, but found %d and %d."
% (len(source_vocabs), len(validation_sources)),
)
train_iter, validation_iter, config_data, data_info = (
data_io.get_training_data_iters(
sources=sources,
target=os.path.abspath(args.target),
validation_sources=validation_sources,
validation_target=os.path.abspath(args.validation_target),
source_vocabs=source_vocabs,
target_vocab=target_vocab,
source_vocab_paths=source_vocab_paths,
target_vocab_path=target_vocab_path,
shared_vocab=shared_vocab,
batch_size=args.batch_size,
batch_by_words=batch_by_words,
batch_num_devices=batch_num_devices,
fill_up=args.fill_up,
max_seq_len_source=max_seq_len_source,
max_seq_len_target=max_seq_len_target,
bucketing=not args.no_bucketing,
bucket_width=args.bucket_width,
)
)
data_info_fname = os.path.join(output_folder, C.DATA_INFO)
logger.info("Writing data config to '%s'", data_info_fname)
data_info.save(data_info_fname)
return train_iter, validation_iter, config_data, source_vocabs, target_vocab
|
def create_data_iters_and_vocabs(
args: argparse.Namespace,
max_seq_len_source: int,
max_seq_len_target: int,
shared_vocab: bool,
resume_training: bool,
output_folder: str,
) -> Tuple[
"data_io.BaseParallelSampleIter",
"data_io.BaseParallelSampleIter",
"data_io.DataConfig",
List[vocab.Vocab],
vocab.Vocab,
]:
"""
Create the data iterators and the vocabularies.
:param args: Arguments as returned by argparse.
:param max_seq_len_source: Source maximum sequence length.
:param max_seq_len_target: Target maximum sequence length.
:param shared_vocab: Whether to create a shared vocabulary.
:param resume_training: Whether to resume training.
:param output_folder: Output folder.
:return: The data iterators (train, validation, config_data) as well as the source and target vocabularies.
"""
num_words_source, num_words_target = args.num_words
num_words_source = num_words_source if num_words_source > 0 else None
num_words_target = num_words_target if num_words_target > 0 else None
word_min_count_source, word_min_count_target = args.word_min_count
batch_num_devices = (
1 if args.use_cpu else sum(-di if di < 0 else 1 for di in args.device_ids)
)
batch_by_words = args.batch_type == C.BATCH_TYPE_WORD
validation_sources = [args.validation_source] + args.validation_source_factors
validation_sources = [str(os.path.abspath(source)) for source in validation_sources]
either_raw_or_prepared_error_msg = (
"Either specify a raw training corpus with %s and %s or a preprocessed corpus "
"with %s."
% (C.TRAINING_ARG_SOURCE, C.TRAINING_ARG_TARGET, C.TRAINING_ARG_PREPARED_DATA)
)
if args.prepared_data is not None:
utils.check_condition(
args.source is None and args.target is None,
either_raw_or_prepared_error_msg,
)
if not resume_training:
utils.check_condition(
args.source_vocab is None and args.target_vocab is None,
"You are using a prepared data folder, which is tied to a vocabulary. "
"To change it you need to rerun data preparation with a different vocabulary.",
)
train_iter, validation_iter, data_config, source_vocabs, target_vocab = (
data_io.get_prepared_data_iters(
prepared_data_dir=args.prepared_data,
validation_sources=validation_sources,
validation_target=str(os.path.abspath(args.validation_target)),
shared_vocab=shared_vocab,
batch_size=args.batch_size,
batch_by_words=batch_by_words,
batch_num_devices=batch_num_devices,
fill_up=args.fill_up,
)
)
check_condition(
len(source_vocabs) == len(args.source_factors_num_embed) + 1,
"Data was prepared with %d source factors, but only provided %d source factor dimensions."
% (len(source_vocabs), len(args.source_factors_num_embed) + 1),
)
if resume_training:
# resuming training. Making sure the vocabs in the model and in the prepared data match up
model_source_vocabs = vocab.load_source_vocabs(output_folder)
for i, (v, mv) in enumerate(zip(source_vocabs, model_source_vocabs)):
utils.check_condition(
vocab.are_identical(v, mv),
"Prepared data and resumed model source vocab %d do not match." % i,
)
model_target_vocab = vocab.load_target_vocab(output_folder)
utils.check_condition(
vocab.are_identical(target_vocab, model_target_vocab),
"Prepared data and resumed model target vocabs do not match.",
)
check_condition(
len(args.source_factors) == len(args.validation_source_factors),
"Training and validation data must have the same number of factors: %d vs. %d."
% (len(args.source_factors), len(args.validation_source_factors)),
)
return train_iter, validation_iter, data_config, source_vocabs, target_vocab
else:
utils.check_condition(
args.prepared_data is None
and args.source is not None
and args.target is not None,
either_raw_or_prepared_error_msg,
)
if resume_training:
# Load the existing vocabs created when starting the training run.
source_vocabs = vocab.load_source_vocabs(output_folder)
target_vocab = vocab.load_target_vocab(output_folder)
# Recover the vocabulary path from the data info file:
data_info = cast(
data_io.DataInfo, Config.load(os.path.join(output_folder, C.DATA_INFO))
)
source_vocab_paths = data_info.source_vocabs
target_vocab_path = data_info.target_vocab
else:
# Load or create vocabs
source_vocab_paths = [args.source_vocab] + [None] * len(args.source_factors)
target_vocab_path = args.target_vocab
source_vocabs, target_vocab = vocab.load_or_create_vocabs(
source_paths=[args.source] + args.source_factors,
target_path=args.target,
source_vocab_paths=source_vocab_paths,
target_vocab_path=target_vocab_path,
shared_vocab=shared_vocab,
num_words_source=num_words_source,
num_words_target=num_words_target,
word_min_count_source=word_min_count_source,
word_min_count_target=word_min_count_target,
pad_to_multiple_of=args.pad_vocab_to_multiple_of,
)
check_condition(
len(args.source_factors) == len(args.source_factors_num_embed),
"Number of source factor data (%d) differs from provided source factor dimensions (%d)"
% (len(args.source_factors), len(args.source_factors_num_embed)),
)
sources = [args.source] + args.source_factors
sources = [str(os.path.abspath(source)) for source in sources]
train_iter, validation_iter, config_data, data_info = (
data_io.get_training_data_iters(
sources=sources,
target=os.path.abspath(args.target),
validation_sources=validation_sources,
validation_target=os.path.abspath(args.validation_target),
source_vocabs=source_vocabs,
target_vocab=target_vocab,
source_vocab_paths=source_vocab_paths,
target_vocab_path=target_vocab_path,
shared_vocab=shared_vocab,
batch_size=args.batch_size,
batch_by_words=batch_by_words,
batch_num_devices=batch_num_devices,
fill_up=args.fill_up,
max_seq_len_source=max_seq_len_source,
max_seq_len_target=max_seq_len_target,
bucketing=not args.no_bucketing,
bucket_width=args.bucket_width,
)
)
data_info_fname = os.path.join(output_folder, C.DATA_INFO)
logger.info("Writing data config to '%s'", data_info_fname)
data_info.save(data_info_fname)
return train_iter, validation_iter, config_data, source_vocabs, target_vocab
|
https://github.com/awslabs/sockeye/issues/513
|
[INFO:sockeye.training] Checkpoint [1] Train-perplexity=928.900834
infer_shape error. Arguments:
source: (10, 10, 1)
target: (10, 10)
target_label: (10, 10)
[ERROR:__main__] Uncaught exception
Traceback (most recent call last):
File "/Users/fhieber/miniconda3/lib/python3.6/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/Users/fhieber/miniconda3/lib/python3.6/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/Users/fhieber/workspace/nmt/src/Sockeye/sockeye/train.py", line 881, in <module>
main()
File "/Users/fhieber/workspace/nmt/src/Sockeye/sockeye/train.py", line 756, in main
train(args)
File "/Users/fhieber/workspace/nmt/src/Sockeye/sockeye/train.py", line 877, in train
existing_parameters=args.params)
File "/Users/fhieber/workspace/nmt/src/Sockeye/sockeye/training.py", line 573, in fit
self._evaluate(validation_iter, metric_val)
File "/Users/fhieber/workspace/nmt/src/Sockeye/sockeye/training.py", line 711, in _evaluate
self.model.evaluate(val_iter, val_metric)
File "/Users/fhieber/workspace/nmt/src/Sockeye/sockeye/training.py", line 240, in evaluate
self.module.forward(eval_batch, is_train=False)
File "/Users/fhieber/miniconda3/lib/python3.6/site-packages/mxnet/module/bucketing_module.py", line 456, in forward
self._curr_module.forward(data_batch, is_train=is_train)
File "/Users/fhieber/miniconda3/lib/python3.6/site-packages/mxnet/module/module.py", line 610, in forward
self.reshape(new_dshape, new_lshape)
File "/Users/fhieber/miniconda3/lib/python3.6/site-packages/mxnet/module/module.py", line 471, in reshape
self._exec_group.reshape(self._data_shapes, self._label_shapes)
File "/Users/fhieber/miniconda3/lib/python3.6/site-packages/mxnet/module/executor_group.py", line 382, in reshape
self.bind_exec(data_shapes, label_shapes, reshape=True)
File "/Users/fhieber/miniconda3/lib/python3.6/site-packages/mxnet/module/executor_group.py", line 358, in bind_exec
allow_up_sizing=True, **dict(data_shapes_i + label_shapes_i))
File "/Users/fhieber/miniconda3/lib/python3.6/site-packages/mxnet/executor.py", line 402, in reshape
arg_shapes, _, aux_shapes = self._symbol.infer_shape(**kwargs)
File "/Users/fhieber/miniconda3/lib/python3.6/site-packages/mxnet/symbol/symbol.py", line 990, in infer_shape
res = self._infer_shape_impl(False, *args, **kwargs)
File "/Users/fhieber/miniconda3/lib/python3.6/site-packages/mxnet/symbol/symbol.py", line 1120, in _infer_shape_impl
ctypes.byref(complete)))
File "/Users/fhieber/miniconda3/lib/python3.6/site-packages/mxnet/base.py", line 149, in check_call
raise MXNetError(py_str(_LIB.MXGetLastError()))
mxnet.base.MXNetError: Error in operator source_embed_factor_split: [15:36:18] src/operator/./slice_channel-inl.h:208: Check failed: dshape[real_axis] % param_.num_outputs == 0U (1 vs. 0) You are trying to split the 2-th axis of input tensor with shape [10,10,1] into num_outputs=2 evenly sized chunks, but this is not possible because 2 does not evenly divide 1
|
mxnet.base.MXNetError
|
def trending(self, count=30, minCursor=0, maxCursor=0, **kwargs) -> dict:
"""
Gets trending TikToks
"""
(
region,
language,
proxy,
maxCount,
did,
) = self.__process_kwargs__(kwargs)
kwargs["custom_did"] = did
response = []
first = True
while len(response) < count:
if count < maxCount:
realCount = count
else:
realCount = maxCount
query = {
"count": realCount,
"id": 1,
"secUid": "",
"maxCursor": maxCursor,
"minCursor": minCursor,
"sourceType": 12,
"appId": 1233,
"region": region,
"priority_region": region,
"language": language,
}
api_url = "{}api/item_list/?{}&{}".format(
BASE_URL, self.__add_new_params__(), urlencode(query)
)
res = self.getData(url=api_url, **kwargs)
for t in res.get("items", []):
response.append(t)
if not res["hasMore"] and not first:
logging.info("TikTok isn't sending more TikToks beyond this point.")
return response[:count]
realCount = count - len(response)
maxCursor = res["maxCursor"]
first = False
return response[:count]
|
def trending(self, count=30, minCursor=0, maxCursor=0, **kwargs) -> dict:
"""
Gets trending TikToks
"""
(
region,
language,
proxy,
maxCount,
did,
) = self.__process_kwargs__(kwargs)
kwargs["custom_did"] = did
response = []
first = True
while len(response) < count:
if count < maxCount:
realCount = count
else:
realCount = maxCount
query = {
"count": realCount,
"id": 1,
"secUid": "",
"maxCursor": maxCursor,
"minCursor": minCursor,
"sourceType": 12,
"appId": 1233,
"region": region,
"priority_region": region,
"language": language,
}
api_url = "{}api/item_list/?{}&{}".format(
BASE_URL, self.__add_new_params__(), urlencode(query)
)
res = self.getData(url=api_url, **kwargs)
for t in res.get("items", []):
response.append(t)
if not res["hasMore"] and not first:
logging.info("TikTok isn't sending more TikToks beyond this point.")
return response[:count]
realCount = count - len(response)
maxCursor = res["maxCursor"]
minCursor = res["minCursor"]
first = False
return response[:count]
|
https://github.com/davidteather/TikTok-Api/issues/338
|
Invalid Response
Traceback (most recent call last):
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 162, in getData
return r.json()
File "/home/user/.local/lib/python3.8/site-packages/requests/models.py", line 898, in json
return complexjson.loads(self.text, **kwargs)
File "/usr/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/user/tiktok/tiktokhelper.py", line 33, in addUser
data = api.getUser(uniqueId)
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 1010, in getUser
return self.getData(url=api_url, **kwargs)["userInfo"]
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 170, in getData
raise Exception("Invalid Response")
Exception: Invalid Response
|
json.decoder.JSONDecodeError
|
def getUser(self, username, **kwargs) -> dict:
"""Gets the full exposed user object
:param username: The username of the user.
:param language: The 2 letter code of the language to return.
Note: Doesn't seem to have an affect.
:param proxy: The IP address of a proxy to make requests from.
"""
(
region,
language,
proxy,
maxCount,
did,
) = self.__process_kwargs__(kwargs)
kwargs["custom_did"] = did
query = {"language": language}
api_url = "{}node/share/user/@{}?{}&{}".format(
BASE_URL, quote(username), self.__add_new_params__(), urlencode(query)
)
return self.getData(url=api_url, **kwargs)["userInfo"]
|
def getUser(self, username, **kwargs) -> dict:
"""Gets the full exposed user object
:param username: The username of the user.
:param language: The 2 letter code of the language to return.
Note: Doesn't seem to have an affect.
:param proxy: The IP address of a proxy to make requests from.
"""
(
region,
language,
proxy,
maxCount,
did,
) = self.__process_kwargs__(kwargs)
kwargs["custom_did"] = did
query = {"uniqueId": username, "language": language}
api_url = "{}api/user/detail/?{}&{}".format(
BASE_URL, self.__add_new_params__(), urlencode(query)
)
return self.getData(url=api_url, **kwargs)["userInfo"]
|
https://github.com/davidteather/TikTok-Api/issues/338
|
Invalid Response
Traceback (most recent call last):
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 162, in getData
return r.json()
File "/home/user/.local/lib/python3.8/site-packages/requests/models.py", line 898, in json
return complexjson.loads(self.text, **kwargs)
File "/usr/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/user/tiktok/tiktokhelper.py", line 33, in addUser
data = api.getUser(uniqueId)
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 1010, in getUser
return self.getData(url=api_url, **kwargs)["userInfo"]
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 170, in getData
raise Exception("Invalid Response")
Exception: Invalid Response
|
json.decoder.JSONDecodeError
|
def update_messager():
if not check("TikTokApi"):
# Outdated
print(
"TikTokApi package is outdated, please consider upgrading! \n(You can suppress this by setting ignore_version=True in the TikTokApi constructor)"
)
if not check_future_deprecation():
print(
"Your version of python is going to be deprecated, for future updates upgrade to 3.7+"
)
|
def update_messager():
# if not check("TikTokApi"):
# Outdated
# print("TikTokApi package is outdated, please consider upgrading! \n(You can suppress this by setting ignore_version to True while calling the TikTok Api class)")
if not check_future_deprecation():
print(
"Your version of python is going to be deprecated, for future updates upgrade to 3.7+"
)
|
https://github.com/davidteather/TikTok-Api/issues/338
|
Invalid Response
Traceback (most recent call last):
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 162, in getData
return r.json()
File "/home/user/.local/lib/python3.8/site-packages/requests/models.py", line 898, in json
return complexjson.loads(self.text, **kwargs)
File "/usr/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/user/tiktok/tiktokhelper.py", line 33, in addUser
data = api.getUser(uniqueId)
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 1010, in getUser
return self.getData(url=api_url, **kwargs)["userInfo"]
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 170, in getData
raise Exception("Invalid Response")
Exception: Invalid Response
|
json.decoder.JSONDecodeError
|
def __init__(self, **kwargs):
"""The TikTokApi class. Used to interact with TikTok.
:param logging_level: The logging level you want the program to run at
:param request_delay: The amount of time to wait before making a request.
:param executablePath: The location of the chromedriver.exe
"""
# Forces Singleton
if TikTokApi.__instance is None:
TikTokApi.__instance = self
else:
raise Exception("Only one TikTokApi object is allowed")
logging.basicConfig(level=kwargs.get("logging_level", logging.WARNING))
logging.info("Class initalized")
self.executablePath = kwargs.get("executablePath", None)
self.userAgent = (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/86.0.4240.111 Safari/537.36"
)
self.proxy = kwargs.get("proxy", None)
self.signer_url = kwargs.get("external_signer", None)
if self.signer_url == None:
self.browser = browser(**kwargs)
self.userAgent = self.browser.userAgent
try:
self.timezone_name = self.__format_new_params__(self.browser.timezone_name)
self.browser_language = self.__format_new_params__(
self.browser.browser_language
)
self.browser_platform = self.__format_new_params__(
self.browser.browser_platform
)
self.browser_name = self.__format_new_params__(self.browser.browser_name)
self.browser_version = self.__format_new_params__(self.browser.browser_version)
self.width = self.browser.width
self.height = self.browser.height
except Exception as e:
logging.warning("An error occured but it was ignored.")
self.timezone_name = ""
self.browser_language = ""
self.browser_platform = ""
self.browser_name = ""
self.browser_version = ""
self.width = "1920"
self.height = "1080"
self.request_delay = kwargs.get("request_delay", None)
|
def __init__(self, **kwargs):
"""The TikTokApi class. Used to interact with TikTok.
:param logging_level: The logging level you want the program to run at
:param request_delay: The amount of time to wait before making a request.
:param executablePath: The location of the chromedriver.exe
"""
# Forces Singleton
if TikTokApi.__instance is None:
TikTokApi.__instance = self
else:
raise Exception("Only one TikTokApi object is allowed")
logging.basicConfig(level=kwargs.get("logging_level", logging.CRITICAL))
logging.info("Class initalized")
self.executablePath = kwargs.get("executablePath", None)
self.userAgent = (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/86.0.4240.111 Safari/537.36"
)
self.proxy = kwargs.get("proxy", None)
self.signer_url = kwargs.get("external_signer", None)
if self.signer_url == None:
self.browser = browser(**kwargs)
self.userAgent = self.browser.userAgent
try:
self.timezone_name = self.__format_new_params__(self.browser.timezone_name)
self.browser_language = self.__format_new_params__(
self.browser.browser_language
)
self.browser_platform = self.__format_new_params__(
self.browser.browser_platform
)
self.browser_name = self.__format_new_params__(self.browser.browser_name)
self.browser_version = self.__format_new_params__(self.browser.browser_version)
self.width = self.browser.width
self.height = self.browser.height
except Exception as e:
logging.warning("An error occured but it was ignored.")
self.timezone_name = ""
self.browser_language = ""
self.browser_platform = ""
self.browser_name = ""
self.browser_version = ""
self.width = "1920"
self.height = "1080"
self.request_delay = kwargs.get("request_delay", None)
|
https://github.com/davidteather/TikTok-Api/issues/338
|
Invalid Response
Traceback (most recent call last):
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 162, in getData
return r.json()
File "/home/user/.local/lib/python3.8/site-packages/requests/models.py", line 898, in json
return complexjson.loads(self.text, **kwargs)
File "/usr/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/user/tiktok/tiktokhelper.py", line 33, in addUser
data = api.getUser(uniqueId)
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 1010, in getUser
return self.getData(url=api_url, **kwargs)["userInfo"]
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 170, in getData
raise Exception("Invalid Response")
Exception: Invalid Response
|
json.decoder.JSONDecodeError
|
def getMusicObject(self, id, **kwargs) -> dict:
"""Returns a music object for a specific sound id.
:param id: The sound id to search by.
:param language: The 2 letter code of the language to return.
Note: Doesn't seem to have an affect.
:param proxy: The IP address of a proxy to make requests from.
"""
(
region,
language,
proxy,
maxCount,
did,
) = self.__process_kwargs__(kwargs)
kwargs["custom_did"] = did
query = {"musicId": id, "language": language}
api_url = "{}node/share/music/{}?{}&{}".format(
BASE_URL,
self.get_music_title(id) + "-" + str(id),
self.__add_new_params__(),
urlencode(query),
)
return self.getData(url=api_url, **kwargs)
|
def getMusicObject(self, id, **kwargs) -> dict:
"""Returns a music object for a specific sound id.
:param id: The sound id to search by.
:param language: The 2 letter code of the language to return.
Note: Doesn't seem to have an affect.
:param proxy: The IP address of a proxy to make requests from.
"""
(
region,
language,
proxy,
maxCount,
did,
) = self.__process_kwargs__(kwargs)
kwargs["custom_did"] = did
query = {"musicId": id, "language": language}
api_url = "{}api/music/detail/?{}&{}".format(
BASE_URL, self.__add_new_params__(), urlencode(query)
)
return self.getData(url=api_url, **kwargs)
|
https://github.com/davidteather/TikTok-Api/issues/338
|
Invalid Response
Traceback (most recent call last):
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 162, in getData
return r.json()
File "/home/user/.local/lib/python3.8/site-packages/requests/models.py", line 898, in json
return complexjson.loads(self.text, **kwargs)
File "/usr/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/user/tiktok/tiktokhelper.py", line 33, in addUser
data = api.getUser(uniqueId)
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 1010, in getUser
return self.getData(url=api_url, **kwargs)["userInfo"]
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 170, in getData
raise Exception("Invalid Response")
Exception: Invalid Response
|
json.decoder.JSONDecodeError
|
def getHashtagObject(self, hashtag, **kwargs) -> dict:
"""Returns a hashtag object.
:param hashtag: The hashtag to search by.
:param language: The 2 letter code of the language to return.
Note: Doesn't seem to have an affect.
:param proxy: The IP address of a proxy to make requests from.
"""
(
region,
language,
proxy,
maxCount,
did,
) = self.__process_kwargs__(kwargs)
kwargs["custom_did"] = did
query = {"name": hashtag, "isName": True, "lang": language}
api_url = "{}node/share/tag/{}?{}&{}".format(
BASE_URL, quote(hashtag), self.__add_new_params__(), urlencode(query)
)
return self.getData(url=api_url, **kwargs)
|
def getHashtagObject(self, hashtag, **kwargs) -> dict:
"""Returns a hashtag object.
:param hashtag: The hashtag to search by.
:param language: The 2 letter code of the language to return.
Note: Doesn't seem to have an affect.
:param proxy: The IP address of a proxy to make requests from.
"""
(
region,
language,
proxy,
maxCount,
did,
) = self.__process_kwargs__(kwargs)
kwargs["custom_did"] = did
query = {"challengeName": hashtag, "language": language}
api_url = "{}api/challenge/detail/?{}&{}".format(
BASE_URL, self.__add_new_params__(), urlencode(query)
)
return self.getData(url=api_url, **kwargs)
|
https://github.com/davidteather/TikTok-Api/issues/338
|
Invalid Response
Traceback (most recent call last):
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 162, in getData
return r.json()
File "/home/user/.local/lib/python3.8/site-packages/requests/models.py", line 898, in json
return complexjson.loads(self.text, **kwargs)
File "/usr/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/user/tiktok/tiktokhelper.py", line 33, in addUser
data = api.getUser(uniqueId)
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 1010, in getUser
return self.getData(url=api_url, **kwargs)["userInfo"]
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 170, in getData
raise Exception("Invalid Response")
Exception: Invalid Response
|
json.decoder.JSONDecodeError
|
def getHashtagDetails(self, hashtag, **kwargs) -> dict:
"""Returns a hashtag object.
:param hashtag: The hashtag to search by.
:param language: The 2 letter code of the language to return.
Note: Doesn't seem to have an affect.
:param proxy: The IP address of a proxy to make requests from.
"""
logging.warning(
"The getHashtagDetails will be deprecated in a future version. Replace it with getHashtagObject"
)
(
region,
language,
proxy,
maxCount,
did,
) = self.__process_kwargs__(kwargs)
kwargs["custom_did"] = did
query = {"lang": language}
api_url = "{}node/share/tag/{}?{}&{}".format(
BASE_URL, quote(hashtag), self.__add_new_params__(), urlencode(query)
)
return self.getData(url=api_url, **kwargs)
|
def getHashtagDetails(self, hashtag, **kwargs) -> dict:
"""Returns a hashtag object.
:param hashtag: The hashtag to search by.
:param language: The 2 letter code of the language to return.
Note: Doesn't seem to have an affect.
:param proxy: The IP address of a proxy to make requests from.
"""
(
region,
language,
proxy,
maxCount,
did,
) = self.__process_kwargs__(kwargs)
kwargs["custom_did"] = did
query = {"language": language}
api_url = "{}node/share/tag/{}?{}&{}".format(
BASE_URL, quote(hashtag), self.__add_new_params__(), urlencode(query)
)
return self.getData(url=api_url, **kwargs)
|
https://github.com/davidteather/TikTok-Api/issues/338
|
Invalid Response
Traceback (most recent call last):
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 162, in getData
return r.json()
File "/home/user/.local/lib/python3.8/site-packages/requests/models.py", line 898, in json
return complexjson.loads(self.text, **kwargs)
File "/usr/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/user/tiktok/tiktokhelper.py", line 33, in addUser
data = api.getUser(uniqueId)
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 1010, in getUser
return self.getData(url=api_url, **kwargs)["userInfo"]
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 170, in getData
raise Exception("Invalid Response")
Exception: Invalid Response
|
json.decoder.JSONDecodeError
|
def __add_new_params__(self) -> str:
query = {
"aid": 1988,
"app_name": "tiktok_web",
"device_platform": "web",
"referer": "https://www.tiktok.com/",
"user_agent": self.__format_new_params__(self.userAgent),
"cookie_enabled": "true",
"screen_width": self.width,
"screen_height": self.height,
"browser_language": self.browser_language,
"browser_platform": self.browser_platform,
"browser_name": self.browser_name,
"browser_version": self.browser_version,
"browser_online": "true",
"ac": "4g",
"timezone_name": self.timezone_name,
"appId": 1233,
"appType": "m",
"isAndroid": False,
"isMobile": False,
"isIOS": False,
"OS": "windows",
"page_referer": "https://www.tiktok.com/",
}
return urlencode(query)
|
def __add_new_params__(self) -> str:
query = {
"aid": 1988,
"app_name": "tiktok_web",
"device_platform": "web",
"referer": "https://www.tiktok.com/",
"user_agent": self.__format_new_params__(self.userAgent),
"cookie_enabled": "true",
"screen_width": self.width,
"screen_height": self.height,
"browser_language": self.browser_language,
"browser_platform": self.browser_platform,
"browser_name": self.browser_name,
"browser_version": self.browser_version,
"browser_online": "true",
"ac": "4g",
"timezone_name": self.timezone_name,
"appId": 1233,
"appType": "m",
"isAndroid": False,
"isMobile": False,
"isIOS": False,
"OS": "windows",
}
return urlencode(query)
|
https://github.com/davidteather/TikTok-Api/issues/338
|
Invalid Response
Traceback (most recent call last):
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 162, in getData
return r.json()
File "/home/user/.local/lib/python3.8/site-packages/requests/models.py", line 898, in json
return complexjson.loads(self.text, **kwargs)
File "/usr/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/user/tiktok/tiktokhelper.py", line 33, in addUser
data = api.getUser(uniqueId)
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 1010, in getUser
return self.getData(url=api_url, **kwargs)["userInfo"]
File "/home/user/.local/lib/python3.8/site-packages/TikTokApi/tiktok.py", line 170, in getData
raise Exception("Invalid Response")
Exception: Invalid Response
|
json.decoder.JSONDecodeError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.