after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def get_diff_input_output(self, pin, port, attrs, invert):
self._check_feature(
"differential input/output", pin, attrs, valid_xdrs=(0, 1, 2), valid_attrs=True
)
m = Module()
i, o, t = self._get_xdr_buffer(m, pin, i_invert=invert, o_invert=invert)
for bit in range(pin.width):
m.submodules["{}_{}".format(pin.name, bit)] = Instance(
"IOBUFDS",
i_T=t,
i_I=o[bit],
o_O=i[bit],
io_IO=port.p[bit],
io_IOB=port.n[bit],
)
return m
|
def get_diff_input_output(self, pin, p_port, n_port, attrs, invert):
self._check_feature(
"differential input/output", pin, attrs, valid_xdrs=(0, 1, 2), valid_attrs=True
)
m = Module()
i, o, t = self._get_xdr_buffer(m, pin, i_invert=invert, o_invert=invert)
for bit in range(len(p_port)):
m.submodules["{}_{}".format(pin.name, bit)] = Instance(
"IOBUFDS",
i_T=t,
i_I=o[bit],
o_O=i[bit],
io_IO=p_port[bit],
io_IOB=n_port[bit],
)
return m
|
https://github.com/nmigen/nmigen/issues/456
|
ERROR: IO 'ddr3_0__dqs__n[0]' is unconstrained in LPF (override this error with --lpf-allow-unconstrained)
0 warnings, 1 error
Traceback (most recent call last):
File "diffpairissue.py", line 20, in <module>
ECPIX585Platform().build(Top(), do_program=True)
File "/home/jeanthomas/Documents/nmigen/nmigen/build/plat.py", line 94, in build
products = plan.execute_local(build_dir)
File "/home/jeanthomas/Documents/nmigen/nmigen/build/run.py", line 95, in execute_local
subprocess.check_call(["sh", "{}.sh".format(self.script)])
File "/usr/lib64/python3.8/subprocess.py", line 364, in check_call
raise CalledProcessError(retcode, cmd)
subprocess.CalledProcessError: Command '['sh', 'build_top.sh']' returned non-zero exit status 255.
|
subprocess.CalledProcessError
|
def convert(
fi,
ios=None,
name="top",
special_overrides=dict(),
attr_translate=None,
create_clock_domains=True,
display_run=False,
):
if display_run:
warnings.warn(
"`display_run=True` support has been removed",
DeprecationWarning,
stacklevel=1,
)
if special_overrides:
warnings.warn(
"`special_overrides` support as well as `Special` has been removed",
DeprecationWarning,
stacklevel=1,
)
# TODO: attr_translate
if isinstance(fi, Module):
fi = fi.get_fragment()
def missing_domain(name):
if create_clock_domains:
return ClockDomain(name)
v_output = verilog.convert(
elaboratable=fi, name=name, ports=ios or (), missing_domain=missing_domain
)
output = ConvOutput()
output.set_main_source(v_output)
return output
|
def convert(
fi,
ios=None,
name="top",
special_overrides=dict(),
attr_translate=None,
create_clock_domains=True,
display_run=False,
):
if display_run:
warnings.warn(
"`display_run=True` support has been removed",
DeprecationWarning,
stacklevel=1,
)
if special_overrides:
warnings.warn(
"`special_overrides` support as well as `Special` has been removed",
DeprecationWarning,
stacklevel=1,
)
# TODO: attr_translate
def missing_domain(name):
if create_clock_domains:
return ClockDomain(name)
v_output = verilog.convert(
elaboratable=fi.get_fragment(),
name=name,
ports=ios or (),
missing_domain=missing_domain,
)
output = ConvOutput()
output.set_main_source(v_output)
return output
|
https://github.com/nmigen/nmigen/issues/344
|
Traceback (most recent call last):
File "src/test_run_simulation_bug.py", line 28, in <module>
test1()
File "src/test_run_simulation_bug.py", line 24, in test1
vcd_name="test_run_simulation_bug.vcd")
File "/home/jacob/projects/nmigen/nmigen/compat/sim/__init__.py", line 22, in run_simulation
fragment.domains += ClockDomain("sync")
AttributeError: 'MyModule' object has no attribute 'domains'
|
AttributeError
|
def run_simulation(
fragment_or_module,
generators,
clocks={"sync": 10},
vcd_name=None,
special_overrides={},
):
assert not special_overrides
if hasattr(fragment_or_module, "get_fragment"):
fragment = fragment_or_module.get_fragment()
else:
fragment = fragment_or_module
fragment = Fragment.get(fragment, platform=None)
if not isinstance(generators, dict):
generators = {"sync": generators}
if "sync" not in fragment.domains:
fragment.add_domains(ClockDomain("sync"))
sim = Simulator(fragment)
for domain, period in clocks.items():
sim.add_clock(period / 1e9, domain=domain)
for domain, processes in generators.items():
def wrap(process):
def wrapper():
yield from process
return wrapper
if isinstance(processes, Iterable) and not inspect.isgenerator(processes):
for process in processes:
sim.add_sync_process(wrap(process), domain=domain)
else:
sim.add_sync_process(wrap(processes), domain=domain)
if vcd_name is not None:
with sim.write_vcd(vcd_name):
sim.run()
else:
sim.run()
|
def run_simulation(
fragment_or_module,
generators,
clocks={"sync": 10},
vcd_name=None,
special_overrides={},
):
assert not special_overrides
if hasattr(fragment_or_module, "get_fragment"):
fragment = fragment_or_module.get_fragment()
else:
fragment = fragment_or_module
if not isinstance(generators, dict):
generators = {"sync": generators}
fragment.domains += ClockDomain("sync")
sim = Simulator(fragment)
for domain, period in clocks.items():
sim.add_clock(period / 1e9, domain=domain)
for domain, processes in generators.items():
def wrap(process):
def wrapper():
yield from process
return wrapper
if isinstance(processes, Iterable) and not inspect.isgenerator(processes):
for process in processes:
sim.add_sync_process(wrap(process), domain=domain)
else:
sim.add_sync_process(wrap(processes), domain=domain)
if vcd_name is not None:
with sim.write_vcd(vcd_name):
sim.run()
else:
sim.run()
|
https://github.com/nmigen/nmigen/issues/344
|
Traceback (most recent call last):
File "src/test_run_simulation_bug.py", line 28, in <module>
test1()
File "src/test_run_simulation_bug.py", line 24, in test1
vcd_name="test_run_simulation_bug.vcd")
File "/home/jacob/projects/nmigen/nmigen/compat/sim/__init__.py", line 22, in run_simulation
fragment.domains += ClockDomain("sync")
AttributeError: 'MyModule' object has no attribute 'domains'
|
AttributeError
|
def add_arguments(self, parser):
parser.add_argument("slugs", nargs="+", type=str)
parser.add_argument(
"-f",
action="store_true",
dest="force",
default=False,
help="Force a build in sphinx",
)
parser.add_argument(
"-V",
dest="version",
default="all",
help="Build a version, or all versions",
)
|
def add_arguments(self, parser):
parser.add_argument("slugs", nargs="+", type=str)
parser.add_argument(
"-r",
action="store_true",
dest="record",
default=False,
help="Make a Build",
)
parser.add_argument(
"-f",
action="store_true",
dest="force",
default=False,
help="Force a build in sphinx",
)
parser.add_argument(
"-V",
dest="version",
default=None,
help="Build a version, or all versions",
)
|
https://github.com/readthedocs/readthedocs.org/issues/3696
|
$ python manage.py update_repos pip
[28/Feb/2018 04:43:25] root:124[23847]: INFO Generating grammar tables from /usr/lib/python2.7/lib2to3/Grammar.txt
[28/Feb/2018 04:43:25] root:124[23847]: INFO Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
System check identified some issues:
WARNINGS:
?: (1_8.W001) The standalone TEMPLATE_* settings were deprecated in Django 1.8 and the TEMPLATES dictionary takes precedence. You must put the values of the following settings into your default TEMPLATES dict: TEMPLATE_DEBUG.
?: (guardian.W001) Guardian authentication backend is not hooked. You can add this in settings as eg: `AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend', 'guardian.backends.ObjectPermissionBackend')`.
[28/Feb/2018 04:43:27] readthedocs.core.management.commands.update_repos:80[23847]: INFO Building Pip
[28/Feb/2018 04:43:27] requests.packages.urllib3.connectionpool:207[23847]: INFO Starting new HTTP connection (1): localhost
[28/Feb/2018 04:43:27] requests.packages.urllib3.connectionpool:242[23847]: INFO Resetting dropped connection: localhost
[28/Feb/2018 04:43:27] readthedocs.projects.tasks:233[23847]: INFO (Build) [pip:latest] Updating docs from VCS
[28/Feb/2018 04:43:27] readthedocs.projects.tasks:233[23847]: INFO (Build) [pip:latest] Checking out version latest: master
[28/Feb/2018 04:43:27] readthedocs.doc_builder.environments:117[23847]: INFO Running: 'git status' [/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/user_builds/pip/checkouts/latest]
[28/Feb/2018 04:43:27] readthedocs.doc_builder.environments:117[23847]: INFO Running: 'git remote set-url origin https://github.com/pypa/pip' [/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/user_builds/pip/checkouts/latest]
[28/Feb/2018 04:43:27] readthedocs.doc_builder.environments:117[23847]: INFO Running: 'git fetch --tags --prune' [/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/user_builds/pip/checkouts/latest]
[28/Feb/2018 04:43:29] readthedocs.doc_builder.environments:117[23847]: INFO Running: 'git show-ref remotes/origin/master' [/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/user_builds/pip/checkouts/latest]
[28/Feb/2018 04:43:29] readthedocs.doc_builder.environments:193[23847]: WARNING Recording command exit_code as success
[28/Feb/2018 04:43:29] requests.packages.urllib3.connectionpool:242[23847]: INFO Resetting dropped connection: localhost
[28/Feb/2018 04:43:29] readthedocs.vcs_support.utils:101[23847]: INFO Lock (pip): Releasing
[28/Feb/2018 04:43:29] readthedocs.doc_builder.environments:450[23847]: ERROR (Build) [pip:latest] Client Error 400: http://localhost:8000/api/v2/command/
Traceback (most recent call last):
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 333, in run_setup
self.setup_vcs()
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 452, in setup_vcs
self.sync_repo()
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 124, in sync_repo
version_repo.checkout(self.version.identifier)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/vcs_support/backends/git.py", line 183, in checkout
identifier = self.find_ref(identifier)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/vcs_support/backends/git.py", line 207, in find_ref
if self.ref_exists('remotes/origin/' + ref):
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/vcs_support/backends/git.py", line 213, in ref_exists
code, _, _ = self.run('git', 'show-ref', ref, record_as_success=True)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/vcs_support/base.py", line 97, in run
build_cmd = self.environment.run(*cmd, **kwargs)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 470, in run
return super(BuildEnvironment, self).run(*cmd, **kwargs)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 310, in run
return self.run_command_class(cls=self.command_class, cmd=cmd, **kwargs)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 476, in run_command_class
return super(BuildEnvironment, self).run_command_class(*cmd, **kwargs)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 356, in run_command_class
self.record_command(build_cmd)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 456, in record_command
command.save()
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 205, in save
api_v2.command.post(data)
File "/home/rm/src/readthedocs/rtd/local/lib/python2.7/site-packages/slumber/__init__.py", line 167, in post
resp = self._request("POST", data=data, files=files, params=kwargs)
File "/home/rm/src/readthedocs/rtd/local/lib/python2.7/site-packages/slumber/__init__.py", line 101, in _request
raise exception_class("Client Error %s: %s" % (resp.status_code, url), response=resp, content=resp.content)
HttpClientError: Client Error 400: http://localhost:8000/api/v2/command/
[28/Feb/2018 04:43:29] readthedocs.doc_builder.environments:431[23847]: INFO (Build) [pip:latest] Build finished
[28/Feb/2018 04:43:29] readthedocs.projects.tasks:233[23847]: INFO (Build) [pip:latest] Failing build because of setup failure: Client Error 400: http://localhost:8000/api/v2/command/
[28/Feb/2018 04:43:29] readthedocs.projects.tasks:284[23847]: ERROR An unhandled exception was raised during build setup
Traceback (most recent call last):
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 276, in run
setup_successful = self.run_setup(record=record)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 355, in run_setup
self.send_notifications()
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 663, in send_notifications
send_notifications.delay(self.version.pk, build_pk=self.build['id'])
KeyError: 'id'
[28/Feb/2018 04:43:29] celery.app.trace:123[23847]: INFO Task readthedocs.projects.tasks.update_docs[87cb2b57-91fb-48b7-b97b-ec22c31099f1] succeeded in 1.796398073s: False
|
HttpClientError
|
def handle(self, *args, **options):
force = options["force"]
version = options["version"]
if options.get("slugs", []):
for slug in options["slugs"]:
if version and version != "all":
log.info("Updating version %s for %s", version, slug)
for version in Version.objects.filter(
project__slug=slug,
slug=version,
):
trigger_build(project=version.project, version=version)
elif version == "all":
log.info("Updating all versions for %s", slug)
for version in Version.objects.filter(
project__slug=slug,
active=True,
uploaded=False,
):
build = Build.objects.create(
project=version.project,
version=version,
type="html",
state="triggered",
)
tasks.UpdateDocsTask().run(
pk=version.project_id,
build_pk=build.pk,
version_pk=version.pk,
)
else:
p = Project.all_objects.get(slug=slug)
log.info("Building %s", p)
trigger_build(project=p, force=force)
else:
if version == "all":
log.info("Updating all versions")
for version in Version.objects.filter(
active=True,
uploaded=False,
):
tasks.UpdateDocsTask().run(
pk=version.project_id,
force=force,
version_pk=version.pk,
)
else:
log.info("Updating all docs")
for project in Project.objects.all():
tasks.UpdateDocsTask().run(
pk=project.pk,
force=force,
)
|
def handle(self, *args, **options):
record = options["record"]
force = options["force"]
version = options["version"]
if options.get("slugs", []):
for slug in options["slugs"]:
if version and version != "all":
log.info("Updating version %s for %s", version, slug)
for version in Version.objects.filter(
project__slug=slug,
slug=version,
):
trigger_build(project=version.project, version=version)
elif version == "all":
log.info("Updating all versions for %s", slug)
for version in Version.objects.filter(
project__slug=slug,
active=True,
uploaded=False,
):
build_pk = None
if record:
build = Build.objects.create(
project=version.project,
version=version,
type="html",
state="triggered",
)
build_pk = build.pk
tasks.UpdateDocsTask().run(
pk=version.project_id,
build_pk=build_pk,
record=record,
version_pk=version.pk,
)
else:
p = Project.all_objects.get(slug=slug)
log.info("Building %s", p)
trigger_build(project=p, force=force, record=record)
else:
if version == "all":
log.info("Updating all versions")
for version in Version.objects.filter(
active=True,
uploaded=False,
):
tasks.UpdateDocsTask().run(
pk=version.project_id,
record=record,
force=force,
version_pk=version.pk,
)
else:
log.info("Updating all docs")
for project in Project.objects.all():
tasks.UpdateDocsTask().run(
pk=project.pk,
record=record,
force=force,
)
|
https://github.com/readthedocs/readthedocs.org/issues/3696
|
$ python manage.py update_repos pip
[28/Feb/2018 04:43:25] root:124[23847]: INFO Generating grammar tables from /usr/lib/python2.7/lib2to3/Grammar.txt
[28/Feb/2018 04:43:25] root:124[23847]: INFO Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
System check identified some issues:
WARNINGS:
?: (1_8.W001) The standalone TEMPLATE_* settings were deprecated in Django 1.8 and the TEMPLATES dictionary takes precedence. You must put the values of the following settings into your default TEMPLATES dict: TEMPLATE_DEBUG.
?: (guardian.W001) Guardian authentication backend is not hooked. You can add this in settings as eg: `AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend', 'guardian.backends.ObjectPermissionBackend')`.
[28/Feb/2018 04:43:27] readthedocs.core.management.commands.update_repos:80[23847]: INFO Building Pip
[28/Feb/2018 04:43:27] requests.packages.urllib3.connectionpool:207[23847]: INFO Starting new HTTP connection (1): localhost
[28/Feb/2018 04:43:27] requests.packages.urllib3.connectionpool:242[23847]: INFO Resetting dropped connection: localhost
[28/Feb/2018 04:43:27] readthedocs.projects.tasks:233[23847]: INFO (Build) [pip:latest] Updating docs from VCS
[28/Feb/2018 04:43:27] readthedocs.projects.tasks:233[23847]: INFO (Build) [pip:latest] Checking out version latest: master
[28/Feb/2018 04:43:27] readthedocs.doc_builder.environments:117[23847]: INFO Running: 'git status' [/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/user_builds/pip/checkouts/latest]
[28/Feb/2018 04:43:27] readthedocs.doc_builder.environments:117[23847]: INFO Running: 'git remote set-url origin https://github.com/pypa/pip' [/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/user_builds/pip/checkouts/latest]
[28/Feb/2018 04:43:27] readthedocs.doc_builder.environments:117[23847]: INFO Running: 'git fetch --tags --prune' [/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/user_builds/pip/checkouts/latest]
[28/Feb/2018 04:43:29] readthedocs.doc_builder.environments:117[23847]: INFO Running: 'git show-ref remotes/origin/master' [/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/user_builds/pip/checkouts/latest]
[28/Feb/2018 04:43:29] readthedocs.doc_builder.environments:193[23847]: WARNING Recording command exit_code as success
[28/Feb/2018 04:43:29] requests.packages.urllib3.connectionpool:242[23847]: INFO Resetting dropped connection: localhost
[28/Feb/2018 04:43:29] readthedocs.vcs_support.utils:101[23847]: INFO Lock (pip): Releasing
[28/Feb/2018 04:43:29] readthedocs.doc_builder.environments:450[23847]: ERROR (Build) [pip:latest] Client Error 400: http://localhost:8000/api/v2/command/
Traceback (most recent call last):
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 333, in run_setup
self.setup_vcs()
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 452, in setup_vcs
self.sync_repo()
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 124, in sync_repo
version_repo.checkout(self.version.identifier)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/vcs_support/backends/git.py", line 183, in checkout
identifier = self.find_ref(identifier)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/vcs_support/backends/git.py", line 207, in find_ref
if self.ref_exists('remotes/origin/' + ref):
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/vcs_support/backends/git.py", line 213, in ref_exists
code, _, _ = self.run('git', 'show-ref', ref, record_as_success=True)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/vcs_support/base.py", line 97, in run
build_cmd = self.environment.run(*cmd, **kwargs)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 470, in run
return super(BuildEnvironment, self).run(*cmd, **kwargs)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 310, in run
return self.run_command_class(cls=self.command_class, cmd=cmd, **kwargs)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 476, in run_command_class
return super(BuildEnvironment, self).run_command_class(*cmd, **kwargs)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 356, in run_command_class
self.record_command(build_cmd)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 456, in record_command
command.save()
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 205, in save
api_v2.command.post(data)
File "/home/rm/src/readthedocs/rtd/local/lib/python2.7/site-packages/slumber/__init__.py", line 167, in post
resp = self._request("POST", data=data, files=files, params=kwargs)
File "/home/rm/src/readthedocs/rtd/local/lib/python2.7/site-packages/slumber/__init__.py", line 101, in _request
raise exception_class("Client Error %s: %s" % (resp.status_code, url), response=resp, content=resp.content)
HttpClientError: Client Error 400: http://localhost:8000/api/v2/command/
[28/Feb/2018 04:43:29] readthedocs.doc_builder.environments:431[23847]: INFO (Build) [pip:latest] Build finished
[28/Feb/2018 04:43:29] readthedocs.projects.tasks:233[23847]: INFO (Build) [pip:latest] Failing build because of setup failure: Client Error 400: http://localhost:8000/api/v2/command/
[28/Feb/2018 04:43:29] readthedocs.projects.tasks:284[23847]: ERROR An unhandled exception was raised during build setup
Traceback (most recent call last):
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 276, in run
setup_successful = self.run_setup(record=record)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 355, in run_setup
self.send_notifications()
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 663, in send_notifications
send_notifications.delay(self.version.pk, build_pk=self.build['id'])
KeyError: 'id'
[28/Feb/2018 04:43:29] celery.app.trace:123[23847]: INFO Task readthedocs.projects.tasks.update_docs[87cb2b57-91fb-48b7-b97b-ec22c31099f1] succeeded in 1.796398073s: False
|
HttpClientError
|
def handle(self, *args, **options):
api = slumber.API(base_url="http://readthedocs.org/api/v1/")
user1 = User.objects.filter(pk__gt=0).order_by("pk").first()
for slug in options["project_slug"]:
self.stdout.write("Importing {slug} ...".format(slug=slug))
project_data = api.project.get(slug=slug)
try:
project_data = project_data["objects"][0]
except (KeyError, IndexError):
self.stderr.write(
"Cannot find {slug} in API. Response was:\n{response}".format(
slug=slug, response=json.dumps(project_data)
)
)
try:
project = Project.objects.get(slug=slug)
except Project.DoesNotExist:
project = Project(slug=slug)
exclude_attributes = (
"absolute_url",
"analytics_code",
"canonical_url",
"users",
)
for attribute in project_data:
if attribute not in exclude_attributes:
setattr(project, attribute, project_data[attribute])
project.user = user1
project.save()
if user1:
project.users.add(user1)
call_command("update_repos", project.slug, version="all")
|
def handle(self, *args, **options):
api = slumber.API(base_url="http://readthedocs.org/api/v1/")
user1 = User.objects.filter(pk__gt=0).order_by("pk").first()
for slug in options["project_slug"]:
self.stdout.write("Importing {slug} ...".format(slug=slug))
project_data = api.project.get(slug=slug)
try:
project_data = project_data["objects"][0]
except (KeyError, IndexError):
self.stderr.write(
"Cannot find {slug} in API. Response was:\n{response}".format(
slug=slug, response=json.dumps(project_data)
)
)
try:
project = Project.objects.get(slug=slug)
except Project.DoesNotExist:
project = Project(slug=slug)
exclude_attributes = (
"absolute_url",
"analytics_code",
"canonical_url",
"users",
)
for attribute in project_data:
if attribute not in exclude_attributes:
setattr(project, attribute, project_data[attribute])
project.user = user1
project.save()
if user1:
project.users.add(user1)
call_command("update_repos", project.slug, record=True, version="all")
|
https://github.com/readthedocs/readthedocs.org/issues/3696
|
$ python manage.py update_repos pip
[28/Feb/2018 04:43:25] root:124[23847]: INFO Generating grammar tables from /usr/lib/python2.7/lib2to3/Grammar.txt
[28/Feb/2018 04:43:25] root:124[23847]: INFO Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
System check identified some issues:
WARNINGS:
?: (1_8.W001) The standalone TEMPLATE_* settings were deprecated in Django 1.8 and the TEMPLATES dictionary takes precedence. You must put the values of the following settings into your default TEMPLATES dict: TEMPLATE_DEBUG.
?: (guardian.W001) Guardian authentication backend is not hooked. You can add this in settings as eg: `AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend', 'guardian.backends.ObjectPermissionBackend')`.
[28/Feb/2018 04:43:27] readthedocs.core.management.commands.update_repos:80[23847]: INFO Building Pip
[28/Feb/2018 04:43:27] requests.packages.urllib3.connectionpool:207[23847]: INFO Starting new HTTP connection (1): localhost
[28/Feb/2018 04:43:27] requests.packages.urllib3.connectionpool:242[23847]: INFO Resetting dropped connection: localhost
[28/Feb/2018 04:43:27] readthedocs.projects.tasks:233[23847]: INFO (Build) [pip:latest] Updating docs from VCS
[28/Feb/2018 04:43:27] readthedocs.projects.tasks:233[23847]: INFO (Build) [pip:latest] Checking out version latest: master
[28/Feb/2018 04:43:27] readthedocs.doc_builder.environments:117[23847]: INFO Running: 'git status' [/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/user_builds/pip/checkouts/latest]
[28/Feb/2018 04:43:27] readthedocs.doc_builder.environments:117[23847]: INFO Running: 'git remote set-url origin https://github.com/pypa/pip' [/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/user_builds/pip/checkouts/latest]
[28/Feb/2018 04:43:27] readthedocs.doc_builder.environments:117[23847]: INFO Running: 'git fetch --tags --prune' [/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/user_builds/pip/checkouts/latest]
[28/Feb/2018 04:43:29] readthedocs.doc_builder.environments:117[23847]: INFO Running: 'git show-ref remotes/origin/master' [/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/user_builds/pip/checkouts/latest]
[28/Feb/2018 04:43:29] readthedocs.doc_builder.environments:193[23847]: WARNING Recording command exit_code as success
[28/Feb/2018 04:43:29] requests.packages.urllib3.connectionpool:242[23847]: INFO Resetting dropped connection: localhost
[28/Feb/2018 04:43:29] readthedocs.vcs_support.utils:101[23847]: INFO Lock (pip): Releasing
[28/Feb/2018 04:43:29] readthedocs.doc_builder.environments:450[23847]: ERROR (Build) [pip:latest] Client Error 400: http://localhost:8000/api/v2/command/
Traceback (most recent call last):
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 333, in run_setup
self.setup_vcs()
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 452, in setup_vcs
self.sync_repo()
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 124, in sync_repo
version_repo.checkout(self.version.identifier)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/vcs_support/backends/git.py", line 183, in checkout
identifier = self.find_ref(identifier)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/vcs_support/backends/git.py", line 207, in find_ref
if self.ref_exists('remotes/origin/' + ref):
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/vcs_support/backends/git.py", line 213, in ref_exists
code, _, _ = self.run('git', 'show-ref', ref, record_as_success=True)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/vcs_support/base.py", line 97, in run
build_cmd = self.environment.run(*cmd, **kwargs)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 470, in run
return super(BuildEnvironment, self).run(*cmd, **kwargs)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 310, in run
return self.run_command_class(cls=self.command_class, cmd=cmd, **kwargs)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 476, in run_command_class
return super(BuildEnvironment, self).run_command_class(*cmd, **kwargs)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 356, in run_command_class
self.record_command(build_cmd)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 456, in record_command
command.save()
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/doc_builder/environments.py", line 205, in save
api_v2.command.post(data)
File "/home/rm/src/readthedocs/rtd/local/lib/python2.7/site-packages/slumber/__init__.py", line 167, in post
resp = self._request("POST", data=data, files=files, params=kwargs)
File "/home/rm/src/readthedocs/rtd/local/lib/python2.7/site-packages/slumber/__init__.py", line 101, in _request
raise exception_class("Client Error %s: %s" % (resp.status_code, url), response=resp, content=resp.content)
HttpClientError: Client Error 400: http://localhost:8000/api/v2/command/
[28/Feb/2018 04:43:29] readthedocs.doc_builder.environments:431[23847]: INFO (Build) [pip:latest] Build finished
[28/Feb/2018 04:43:29] readthedocs.projects.tasks:233[23847]: INFO (Build) [pip:latest] Failing build because of setup failure: Client Error 400: http://localhost:8000/api/v2/command/
[28/Feb/2018 04:43:29] readthedocs.projects.tasks:284[23847]: ERROR An unhandled exception was raised during build setup
Traceback (most recent call last):
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 276, in run
setup_successful = self.run_setup(record=record)
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 355, in run_setup
self.send_notifications()
File "/home/rm/src/readthedocs/rtd/checkouts/readthedocs.org/readthedocs/projects/tasks.py", line 663, in send_notifications
send_notifications.delay(self.version.pk, build_pk=self.build['id'])
KeyError: 'id'
[28/Feb/2018 04:43:29] celery.app.trace:123[23847]: INFO Task readthedocs.projects.tasks.update_docs[87cb2b57-91fb-48b7-b97b-ec22c31099f1] succeeded in 1.796398073s: False
|
HttpClientError
|
def setup_api():
session = requests.Session()
if API_HOST.startswith("https"):
# Only use the HostHeaderSSLAdapter for HTTPS connections
adapter_class = host_header_ssl.HostHeaderSSLAdapter
else:
adapter_class = requests.adapters.HTTPAdapter
session.mount(
API_HOST,
adapter_class(max_retries=3),
)
session.headers.update({"Host": PRODUCTION_DOMAIN})
api_config = {
"base_url": "%s/api/v1/" % API_HOST,
"session": session,
}
if USER and PASS:
log.debug("Using slumber with user %s, pointed at %s", USER, API_HOST)
session.auth = (USER, PASS)
else:
log.warning("SLUMBER_USERNAME/PASSWORD settings are not set")
return API(**api_config)
|
def setup_api():
session = Session()
session.mount(API_HOST, host_header_ssl.HostHeaderSSLAdapter())
session.headers.update({"Host": PRODUCTION_DOMAIN})
api_config = {
"base_url": "%s/api/v1/" % API_HOST,
"session": session,
}
if USER and PASS:
log.debug("Using slumber with user %s, pointed at %s", USER, API_HOST)
session.auth = (USER, PASS)
else:
log.warning("SLUMBER_USERNAME/PASSWORD settings are not set")
return API(**api_config)
|
https://github.com/readthedocs/readthedocs.org/issues/4494
|
Traceback (most recent call last):
File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/projects/tasks.py", line 333, in run
self.project = self.get_project(pk)
File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/projects/tasks.py", line 512, in get_project
project_data = api_v2.project(project_pk).get()
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/slumber/__init__.py", line 155, in get
resp = self._request("GET", params=kwargs)
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/slumber/__init__.py", line 97, in _request
resp = self._store["session"].request(method, url, data=data, params=params, files=files, headers=headers)
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/requests/sessions.py", line 512, in request
resp = self.send(prep, **send_kwargs)
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/requests/sessions.py", line 622, in send
r = adapter.send(request, **kwargs)
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/requests_toolbelt/adapters/host_header_ssl.py", line 43, in send
return super(HostHeaderSSLAdapter, self).send(request, **kwargs)
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/requests/adapters.py", line 445, in send
timeout=timeout
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/urllib3/connectionpool.py", line 589, in urlopen
conn = self._get_conn(timeout=pool_timeout)
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/urllib3/connectionpool.py", line 251, in _get_conn
return conn or self._new_conn()
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/urllib3/connectionpool.py", line 212, in _new_conn
strict=self.strict, **self.conn_kw)
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/urllib3/connection.py", line 125, in __init__
_HTTPConnection.__init__(self, *args, **kw)
TypeError: __init__() got an unexpected keyword argument 'assert_hostname'
|
TypeError
|
def setup_api():
session = requests.Session()
if API_HOST.startswith("https"):
# Only use the HostHeaderSSLAdapter for HTTPS connections
adapter_class = host_header_ssl.HostHeaderSSLAdapter
else:
adapter_class = requests.adapters.HTTPAdapter
session.mount(
API_HOST,
adapter_class(max_retries=3),
)
session.headers.update({"Host": PRODUCTION_DOMAIN})
api_config = {
"base_url": "%s/api/v2/" % API_HOST,
"serializer": serialize.Serializer(
default="json-drf",
serializers=[
serialize.JsonSerializer(),
DrfJsonSerializer(),
],
),
"session": session,
}
if USER and PASS:
log.debug(
"Using slumber v2 with user %s, pointed at %s",
USER,
API_HOST,
)
session.auth = (USER, PASS)
else:
log.warning("SLUMBER_USERNAME/PASSWORD settings are not set")
return API(**api_config)
|
def setup_api():
session = requests.Session()
session.mount(
API_HOST,
host_header_ssl.HostHeaderSSLAdapter(
max_retries=3,
),
)
session.headers.update({"Host": PRODUCTION_DOMAIN})
api_config = {
"base_url": "%s/api/v2/" % API_HOST,
"serializer": serialize.Serializer(
default="json-drf",
serializers=[
serialize.JsonSerializer(),
DrfJsonSerializer(),
],
),
"session": session,
}
if USER and PASS:
log.debug(
"Using slumber v2 with user %s, pointed at %s",
USER,
API_HOST,
)
session.auth = (USER, PASS)
else:
log.warning("SLUMBER_USERNAME/PASSWORD settings are not set")
return API(**api_config)
|
https://github.com/readthedocs/readthedocs.org/issues/4494
|
Traceback (most recent call last):
File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/projects/tasks.py", line 333, in run
self.project = self.get_project(pk)
File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/projects/tasks.py", line 512, in get_project
project_data = api_v2.project(project_pk).get()
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/slumber/__init__.py", line 155, in get
resp = self._request("GET", params=kwargs)
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/slumber/__init__.py", line 97, in _request
resp = self._store["session"].request(method, url, data=data, params=params, files=files, headers=headers)
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/requests/sessions.py", line 512, in request
resp = self.send(prep, **send_kwargs)
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/requests/sessions.py", line 622, in send
r = adapter.send(request, **kwargs)
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/requests_toolbelt/adapters/host_header_ssl.py", line 43, in send
return super(HostHeaderSSLAdapter, self).send(request, **kwargs)
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/requests/adapters.py", line 445, in send
timeout=timeout
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/urllib3/connectionpool.py", line 589, in urlopen
conn = self._get_conn(timeout=pool_timeout)
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/urllib3/connectionpool.py", line 251, in _get_conn
return conn or self._new_conn()
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/urllib3/connectionpool.py", line 212, in _new_conn
strict=self.strict, **self.conn_kw)
File "/home/humitos/.pyenv/versions/3.6.6/envs/readthedocs.org/lib/python3.6/site-packages/urllib3/connection.py", line 125, in __init__
_HTTPConnection.__init__(self, *args, **kw)
TypeError: __init__() got an unexpected keyword argument 'assert_hostname'
|
TypeError
|
def create_session(self):
"""Create OAuth session for user
This configures the OAuth session based on the :py:class:`SocialToken`
attributes. If there is an ``expires_at``, treat the session as an auto
renewing token. Some providers expire tokens after as little as 2
hours.
"""
token = self.account.socialtoken_set.first()
if token is None:
return None
token_config = {
"access_token": token.token,
"token_type": "bearer",
}
if token.expires_at is not None:
token_expires = (token.expires_at - datetime.now()).total_seconds()
token_config.update(
{
"refresh_token": token.token_secret,
"expires_in": token_expires,
}
)
self.session = OAuth2Session(
client_id=token.app.client_id,
token=token_config,
auto_refresh_kwargs={
"client_id": token.app.client_id,
"client_secret": token.app.secret,
},
auto_refresh_url=self.get_adapter().access_token_url,
token_updater=self.token_updater(token),
)
return self.session or None
|
def create_session(self):
"""Create OAuth session for user
This configures the OAuth session based on the :py:class:`SocialToken`
attributes. If there is an ``expires_at``, treat the session as an auto
renewing token. Some providers expire tokens after as little as 2
hours.
"""
token = self.account.socialtoken_set.first()
if token is None:
return None
token_config = {
"access_token": str(token.token),
"token_type": "bearer",
}
if token.expires_at is not None:
token_expires = (token.expires_at - datetime.now()).total_seconds()
token_config.update(
{
"refresh_token": str(token.token_secret),
"expires_in": token_expires,
}
)
self.session = OAuth2Session(
client_id=token.app.client_id,
token=token_config,
auto_refresh_kwargs={
"client_id": token.app.client_id,
"client_secret": token.app.secret,
},
auto_refresh_url=self.get_adapter().access_token_url,
token_updater=self.token_updater(token),
)
return self.session or None
|
https://github.com/readthedocs/readthedocs.org/issues/2992
|
[05/Jul/2017 14:44:13] celery.worker.job:282[33151]: ERROR Task readthedocs.oauth.tasks.SyncRemoteRepositories[86bd1cbd-0c99-44a6-b5d7-9beadb917a7b] raised unexpected: InvalidClientIdError(u'(invalid_request) (invalid_request) Invalid
refresh_token',)
Traceback (most recent call last):
File "/Users/anthony/.pyenv/versions/rtd-2.7.8/lib/python2.7/site-packages/celery/app/trace.py", line 240, in trace_task
R = retval = fun(*args, **kwargs)
File "/Users/anthony/.pyenv/versions/rtd-2.7.8/lib/python2.7/site-packages/celery/app/trace.py", line 438, in __protected_call__
return self.run(*args, **kwargs)
File "/Users/anthony/dev/readthedocs.org/readthedocs/core/utils/tasks/public.py", line 71, in run
result = self.run_public(*args, **kwargs)
File "/Users/anthony/dev/readthedocs.org/readthedocs/oauth/tasks.py", line 22, in run_public
service.sync()
File "/Users/anthony/dev/readthedocs.org/readthedocs/oauth/services/bitbucket.py", line 38, in sync
self.sync_repositories()
File "/Users/anthony/dev/readthedocs.org/readthedocs/oauth/services/bitbucket.py", line 46, in sync_repositories
'https://bitbucket.org/api/2.0/repositories/?role=member')
File "/Users/anthony/dev/readthedocs.org/readthedocs/oauth/services/bitbucket.py", line 172, in paginate
resp = self.get_session().get(url)
File "/Users/anthony/.pyenv/versions/rtd-2.7.8/lib/python2.7/site-packages/requests/sessions.py", line 468, in get
return self.request('GET', url, **kwargs)
File "/Users/anthony/.pyenv/versions/rtd-2.7.8/lib/python2.7/site-packages/requests_oauthlib/oauth2_session.py", line 343, in request
self.auto_refresh_url, auth=auth, **kwargs
File "/Users/anthony/.pyenv/versions/rtd-2.7.8/lib/python2.7/site-packages/requests_oauthlib/oauth2_session.py", line 309, in refresh_token
self.token = self._client.parse_request_body_response(r.text, scope=self.scope)
File "/Users/anthony/.pyenv/versions/rtd-2.7.8/lib/python2.7/site-packages/oauthlib/oauth2/rfc6749/clients/base.py", line 409, in parse_request_body_response
self.token = parse_token_response(body, scope=scope)
File "/Users/anthony/.pyenv/versions/rtd-2.7.8/lib/python2.7/site-packages/oauthlib/oauth2/rfc6749/parameters.py", line 376, in parse_token_response
validate_token_parameters(params)
File "/Users/anthony/.pyenv/versions/rtd-2.7.8/lib/python2.7/site-packages/oauthlib/oauth2/rfc6749/parameters.py", line 383, in validate_token_parameters
raise_from_error(params.get('error'), params)
File "/Users/anthony/.pyenv/versions/rtd-2.7.8/lib/python2.7/site-packages/oauthlib/oauth2/rfc6749/errors.py", line 325, in raise_from_error
raise cls(**kwargs)
InvalidClientIdError: (invalid_request) (invalid_request) Invalid refresh_token
|
InvalidClientIdError
|
def build(self, **kwargs):
checkout_path = self.project.checkout_path(self.version.slug)
build_command = [
"python",
self.project.venv_bin(version=self.version.slug, bin="mkdocs"),
self.builder,
"--clean",
"--site-dir",
self.build_dir,
]
if self.use_theme:
build_command.extend(["--theme", "readthedocs"])
cmd_ret = self.run(
*build_command,
cwd=checkout_path,
bin_path=self.project.venv_bin(version=self.version.slug, bin=None),
)
return cmd_ret.successful
|
def build(self, **kwargs):
checkout_path = self.project.checkout_path(self.version.slug)
build_command = [
self.project.venv_bin(version=self.version.slug, bin="mkdocs"),
self.builder,
"--clean",
"--site-dir",
self.build_dir,
]
if self.use_theme:
build_command.extend(["--theme", "readthedocs"])
cmd_ret = self.run(*build_command, cwd=checkout_path)
return cmd_ret.successful
|
https://github.com/readthedocs/readthedocs.org/issues/994
|
doc_builder
-----
Traceback (most recent call last):
File "/home/docs/checkouts/readthedocs.org/readthedocs/projects/utils.py", line 85, in run
stderr=subprocess.PIPE, env=environment)
File "/usr/lib/python2.7/subprocess.py", line 710, in __init__
errread, errwrite)
File "/usr/lib/python2.7/subprocess.py", line 1327, in _execute_child
raise child_exception
OSError: [Errno 2] No such file or directory
|
OSError
|
def __init__(
self,
command,
cwd=None,
shell=False,
environment=None,
combine_output=True,
input_data=None,
build_env=None,
bin_path=None,
):
self.command = command
self.shell = shell
if cwd is None:
cwd = os.getcwd()
self.cwd = cwd
self.environment = os.environ.copy()
if environment is not None:
self.environment.update(environment)
self.combine_output = combine_output
self.build_env = build_env
self.bin_path = bin_path
self.status = None
self.input_data = input_data
self.output = None
self.error = None
|
def __init__(
self,
command,
cwd=None,
shell=False,
environment=None,
combine_output=True,
input_data=None,
build_env=None,
):
self.command = command
self.shell = shell
if cwd is None:
cwd = os.getcwd()
self.cwd = cwd
self.environment = os.environ.copy()
if environment is not None:
self.environment.update(environment)
self.combine_output = combine_output
self.build_env = build_env
self.status = None
self.input_data = input_data
self.output = None
self.error = None
|
https://github.com/readthedocs/readthedocs.org/issues/994
|
doc_builder
-----
Traceback (most recent call last):
File "/home/docs/checkouts/readthedocs.org/readthedocs/projects/utils.py", line 85, in run
stderr=subprocess.PIPE, env=environment)
File "/usr/lib/python2.7/subprocess.py", line 710, in __init__
errread, errwrite)
File "/usr/lib/python2.7/subprocess.py", line 1327, in _execute_child
raise child_exception
OSError: [Errno 2] No such file or directory
|
OSError
|
def run(self):
"""Set up subprocess and execute command
:param cmd_input: input to pass to command in STDIN
:type cmd_input: str
:param combine_output: combine STDERR into STDOUT
"""
log.info("Running: '%s' [%s]", self.get_command(), self.cwd)
stdout = subprocess.PIPE
stderr = subprocess.PIPE
stdin = None
if self.input_data is not None:
stdin = subprocess.PIPE
if self.combine_output:
stderr = subprocess.STDOUT
environment = {}
environment.update(self.environment)
environment["READTHEDOCS"] = "True"
if "DJANGO_SETTINGS_MODULE" in environment:
del environment["DJANGO_SETTINGS_MODULE"]
if "PYTHONPATH" in environment:
del environment["PYTHONPATH"]
if self.bin_path is not None:
env_paths = environment.get("PATH", "").split(":")
env_paths.insert(0, self.bin_path)
environment["PATH"] = ":".join(env_paths)
try:
proc = subprocess.Popen(
self.command,
shell=self.shell,
cwd=self.cwd,
stdin=stdin,
stdout=stdout,
stderr=stderr,
env=environment,
)
cmd_input = None
if self.input_data is not None:
cmd_input = self.input_data
cmd_output = proc.communicate(input=cmd_input)
(cmd_stdout, cmd_stderr) = cmd_output
try:
self.output = cmd_stdout.decode("utf-8", "replace")
except (TypeError, AttributeError):
self.output = None
try:
self.error = cmd_stderr.decode("utf-8", "replace")
except (TypeError, AttributeError):
self.error = None
self.status = proc.returncode
except OSError:
self.error = traceback.format_exc()
self.output = self.error
self.status = -1
|
def run(self):
"""Set up subprocess and execute command
:param cmd_input: input to pass to command in STDIN
:type cmd_input: str
:param combine_output: combine STDERR into STDOUT
"""
log.info("Running: '%s' [%s]", self.get_command(), self.cwd)
stdout = subprocess.PIPE
stderr = subprocess.PIPE
stdin = None
if self.input_data is not None:
stdin = subprocess.PIPE
if self.combine_output:
stderr = subprocess.STDOUT
environment = {}
environment.update(self.environment)
environment["READTHEDOCS"] = "True"
if "DJANGO_SETTINGS_MODULE" in environment:
del environment["DJANGO_SETTINGS_MODULE"]
if "PYTHONPATH" in environment:
del environment["PYTHONPATH"]
try:
proc = subprocess.Popen(
self.command,
shell=self.shell,
cwd=self.cwd,
stdin=stdin,
stdout=stdout,
stderr=stderr,
env=environment,
)
cmd_input = None
if self.input_data is not None:
cmd_input = self.input_data
cmd_output = proc.communicate(input=cmd_input)
(cmd_stdout, cmd_stderr) = cmd_output
try:
self.output = cmd_stdout.decode("utf-8", "replace")
except (TypeError, AttributeError):
self.output = None
try:
self.error = cmd_stderr.decode("utf-8", "replace")
except (TypeError, AttributeError):
self.error = None
self.status = proc.returncode
except OSError:
self.error = traceback.format_exc()
self.output = self.error
self.status = -1
|
https://github.com/readthedocs/readthedocs.org/issues/994
|
doc_builder
-----
Traceback (most recent call last):
File "/home/docs/checkouts/readthedocs.org/readthedocs/projects/utils.py", line 85, in run
stderr=subprocess.PIPE, env=environment)
File "/usr/lib/python2.7/subprocess.py", line 710, in __init__
errread, errwrite)
File "/usr/lib/python2.7/subprocess.py", line 1327, in _execute_child
raise child_exception
OSError: [Errno 2] No such file or directory
|
OSError
|
def venv_bin(self, version=LATEST, bin="python"):
"""Return path to the virtualenv bin path, or a specific binary
By default, return the path to the ``python`` binary in the virtual
environment path. If ``bin`` is :py:data:`None`, then return the path to
the virtual env path.
"""
if bin is None:
bin = ""
return os.path.join(self.venv_path(version), "bin", bin)
|
def venv_bin(self, version=LATEST, bin="python"):
return os.path.join(self.venv_path(version), "bin", bin)
|
https://github.com/readthedocs/readthedocs.org/issues/994
|
doc_builder
-----
Traceback (most recent call last):
File "/home/docs/checkouts/readthedocs.org/readthedocs/projects/utils.py", line 85, in run
stderr=subprocess.PIPE, env=environment)
File "/usr/lib/python2.7/subprocess.py", line 710, in __init__
errread, errwrite)
File "/usr/lib/python2.7/subprocess.py", line 1327, in _execute_child
raise child_exception
OSError: [Errno 2] No such file or directory
|
OSError
|
def wipe_path(pathname, idle=False):
"""Wipe the free space in the path
This function uses an iterator to update the GUI."""
def temporaryfile():
# reference
# http://en.wikipedia.org/wiki/Comparison_of_file_systems#Limits
maxlen = 185
f = None
while True:
try:
f = tempfile.NamedTemporaryFile(
dir=pathname, suffix=__random_string(maxlen), delete=False
)
# In case the application closes prematurely, make sure this
# file is deleted
atexit.register(delete, f.name, allow_shred=False, ignore_missing=True)
break
except OSError as e:
if e.errno in (
errno.ENAMETOOLONG,
errno.ENOSPC,
errno.ENOENT,
errno.EINVAL,
):
# ext3 on Linux 3.5 returns ENOSPC if the full path is greater than 264.
# Shrinking the size helps.
# Microsoft Windows returns ENOENT "No such file or directory"
# or EINVAL "Invalid argument"
# when the path is too long such as %TEMP% but not in C:\
if maxlen > 5:
maxlen -= 5
continue
raise
return f
def estimate_completion():
"""Return (percent, seconds) to complete"""
remaining_bytes = free_space(pathname)
done_bytes = start_free_bytes - remaining_bytes
if done_bytes < 0:
# maybe user deleted large file after starting wipe
done_bytes = 0
if 0 == start_free_bytes:
done_percent = 0
else:
done_percent = 1.0 * done_bytes / (start_free_bytes + 1)
done_time = time.time() - start_time
rate = done_bytes / (done_time + 0.0001) # bytes per second
remaining_seconds = int(remaining_bytes / (rate + 0.0001))
return 1, done_percent, remaining_seconds
logger.debug(_("Wiping path: %s") % pathname)
files = []
total_bytes = 0
start_free_bytes = free_space(pathname)
start_time = time.time()
# Because FAT32 has a maximum file size of 4,294,967,295 bytes,
# this loop is sometimes necessary to create multiple files.
while True:
try:
logger.debug(_("Creating new, temporary file for wiping free space."))
f = temporaryfile()
except OSError as e:
# Linux gives errno 24
# Windows gives errno 28 No space left on device
if e.errno in (errno.EMFILE, errno.ENOSPC):
break
else:
raise
# Get the file system type from the given path
fstype = get_filesystem_type(pathname)
fstype = fstype[0]
print("File System:" + fstype)
# print(f.name) # Added by Marvin for debugging #issue 1051
last_idle = time.time()
# Write large blocks to quickly fill the disk.
blanks = b"\0" * 65536
writtensize = 0
while True:
try:
if fstype != "vfat":
f.write(blanks)
elif writtensize < 4 * 1024 * 1024 * 1024 - 65536:
writtensize += f.write(blanks)
else:
break
except IOError as e:
if e.errno == errno.ENOSPC:
if len(blanks) > 1:
# Try writing smaller blocks
blanks = blanks[0 : len(blanks) // 2]
else:
break
elif e.errno == errno.EFBIG:
break
else:
raise
if idle and (time.time() - last_idle) > 2:
# Keep the GUI responding, and allow the user to abort.
# Also display the ETA.
yield estimate_completion()
last_idle = time.time()
# Write to OS buffer
try:
f.flush()
except IOError as e:
# IOError: [Errno 28] No space left on device
# seen on Microsoft Windows XP SP3 with ~30GB free space but
# not on another XP SP3 with 64MB free space
if not e.errno == errno.ENOSPC:
logger.error(_("Error #%d when flushing the file buffer." % e.errno))
os.fsync(f.fileno()) # write to disk
# Remember to delete
files.append(f)
# For statistics
total_bytes += f.tell()
# If no bytes were written, then quit.
# See https://github.com/bleachbit/bleachbit/issues/502
if (
start_free_bytes - total_bytes < 2
): # Modified by Marvin to fix the issue #1051 [12/06/2020]
break
# sync to disk
sync()
# statistics
elapsed_sec = time.time() - start_time
rate_mbs = (total_bytes / (1000 * 1000)) / elapsed_sec
logger.info(
_(
"Wrote {files:,} files and {bytes:,} bytes in {seconds:,} seconds at {rate:.2f} MB/s"
).format(
files=len(files), bytes=total_bytes, seconds=int(elapsed_sec), rate=rate_mbs
)
)
# how much free space is left (should be near zero)
if "posix" == os.name:
stats = os.statvfs(pathname)
logger.info(
_(
"{bytes:,} bytes and {inodes:,} inodes available to non-super-user"
).format(bytes=stats.f_bsize * stats.f_bavail, inodes=stats.f_favail)
)
logger.info(
_("{bytes:,} bytes and {inodes:,} inodes available to super-user").format(
bytes=stats.f_bsize * stats.f_bfree, inodes=stats.f_ffree
)
)
# truncate and close files
for f in files:
truncate_f(f)
while True:
try:
# Nikita: I noticed a bug that prevented file handles from
# being closed on FAT32. It sometimes takes two .close() calls
# to do actually close (and therefore delete) a temporary file
f.close()
break
except IOError as e:
if e.errno == 0:
logger.debug(_("Handled unknown error #0 while truncating file."))
time.sleep(0.1)
# explicitly delete
delete(f.name, ignore_missing=True)
|
def wipe_path(pathname, idle=False):
"""Wipe the free space in the path
This function uses an iterator to update the GUI."""
def temporaryfile():
# reference
# http://en.wikipedia.org/wiki/Comparison_of_file_systems#Limits
maxlen = 185
f = None
while True:
try:
f = tempfile.NamedTemporaryFile(
dir=pathname, suffix=__random_string(maxlen), delete=False
)
# In case the application closes prematurely, make sure this
# file is deleted
atexit.register(delete, f.name, allow_shred=False, ignore_missing=True)
break
except OSError as e:
if e.errno in (
errno.ENAMETOOLONG,
errno.ENOSPC,
errno.ENOENT,
errno.EINVAL,
):
# ext3 on Linux 3.5 returns ENOSPC if the full path is greater than 264.
# Shrinking the size helps.
# Microsoft Windows returns ENOENT "No such file or directory"
# or EINVAL "Invalid argument"
# when the path is too long such as %TEMP% but not in C:\
if maxlen > 5:
maxlen -= 5
continue
raise
return f
def estimate_completion():
"""Return (percent, seconds) to complete"""
remaining_bytes = free_space(pathname)
done_bytes = start_free_bytes - remaining_bytes
if done_bytes < 0:
# maybe user deleted large file after starting wipe
done_bytes = 0
if 0 == start_free_bytes:
done_percent = 0
else:
done_percent = 1.0 * done_bytes / (start_free_bytes + 1)
done_time = time.time() - start_time
rate = done_bytes / (done_time + 0.0001) # bytes per second
remaining_seconds = int(remaining_bytes / (rate + 0.0001))
return 1, done_percent, remaining_seconds
logger.debug(_("Wiping path: %s") % pathname)
files = []
total_bytes = 0
start_free_bytes = free_space(pathname)
start_time = time.time()
# Because FAT32 has a maximum file size of 4,294,967,295 bytes,
# this loop is sometimes necessary to create multiple files.
while True:
try:
logger.debug(_("Creating new, temporary file for wiping free space."))
f = temporaryfile()
except OSError as e:
# Linux gives errno 24
# Windows gives errno 28 No space left on device
if e.errno in (errno.EMFILE, errno.ENOSPC):
break
else:
raise
last_idle = time.time()
# Write large blocks to quickly fill the disk.
blanks = b"\0" * 65536
while True:
try:
f.write(blanks)
except IOError as e:
if e.errno == errno.ENOSPC:
if len(blanks) > 1:
# Try writing smaller blocks
blanks = blanks[0 : len(blanks) // 2]
else:
break
elif e.errno == errno.EFBIG:
break
else:
raise
if idle and (time.time() - last_idle) > 2:
# Keep the GUI responding, and allow the user to abort.
# Also display the ETA.
yield estimate_completion()
last_idle = time.time()
# Write to OS buffer
try:
f.flush()
except IOError as e:
# IOError: [Errno 28] No space left on device
# seen on Microsoft Windows XP SP3 with ~30GB free space but
# not on another XP SP3 with 64MB free space
if not e.errno == errno.ENOSPC:
logger.error(_("Error #%d when flushing the file buffer." % e.errno))
os.fsync(f.fileno()) # write to disk
# Remember to delete
files.append(f)
# For statistics
total_bytes += f.tell()
# If no bytes were written, then quit.
# See https://github.com/bleachbit/bleachbit/issues/502
if (
start_free_bytes - total_bytes < 2
): # Modified by Marvin to fix the issue #1051 [12/06/2020]
break
# sync to disk
sync()
# statistics
elapsed_sec = time.time() - start_time
rate_mbs = (total_bytes / (1000 * 1000)) / elapsed_sec
logger.info(
_(
"Wrote {files:,} files and {bytes:,} bytes in {seconds:,} seconds at {rate:.2f} MB/s"
).format(
files=len(files), bytes=total_bytes, seconds=int(elapsed_sec), rate=rate_mbs
)
)
# how much free space is left (should be near zero)
if "posix" == os.name:
stats = os.statvfs(pathname)
logger.info(
_(
"{bytes:,} bytes and {inodes:,} inodes available to non-super-user"
).format(bytes=stats.f_bsize * stats.f_bavail, inodes=stats.f_favail)
)
logger.info(
_("{bytes:,} bytes and {inodes:,} inodes available to super-user").format(
bytes=stats.f_bsize * stats.f_bfree, inodes=stats.f_ffree
)
)
# truncate and close files
for f in files:
truncate_f(f)
while True:
try:
# Nikita: I noticed a bug that prevented file handles from
# being closed on FAT32. It sometimes takes two .close() calls
# to do actually close (and therefore delete) a temporary file
f.close()
break
except IOError as e:
if e.errno == 0:
logger.debug(_("Handled unknown error #0 while truncating file."))
time.sleep(0.1)
# explicitly delete
delete(f.name, ignore_missing=True)
|
https://github.com/bleachbit/bleachbit/issues/1043
|
dzmitry@dzmitry-Satellite-C50-A-L6K:~/Downloads/bleachbit-master$ sudo python3 bleachbit.py
clean_operation('_gui'), options = '[]'
Wiping path: /media/dzmitry/6A17-921B
Creating new, temporary file for wiping free space.
Error #27 when flushing the file buffer.
Creating new, temporary file for wiping free space.
Wrote 2 files and 8,024,788,992 bytes in 892 seconds at 9.00 MB/s
0 bytes and 0 inodes available to non-super-user
0 bytes and 0 inodes available to super-user
Error: _gui.free_disk_space: Function: Overwrite free disk space /media/dzmitry/6A17-921B
Traceback (most recent call last):
File "/home/dzmitry/Downloads/bleachbit-master/bleachbit/Worker.py", line 87, in execute
for ret in cmd.execute(self.really_delete):
File "/home/dzmitry/Downloads/bleachbit-master/bleachbit/Command.py", line 145, in execute
for func_ret in self.func():
File "/home/dzmitry/Downloads/bleachbit-master/bleachbit/Cleaner.py", line 743, in wipe_path_func
for ret in FileUtilities.wipe_path(path, idle=True):
File "/home/dzmitry/Downloads/bleachbit-master/bleachbit/FileUtilities.py", line 984, in wipe_path
truncate_f(f)
File "/home/dzmitry/Downloads/bleachbit-master/bleachbit/FileUtilities.py", line 680, in truncate_f
f.truncate(0)
File "/usr/lib/python3.5/tempfile.py", line 622, in func_wrapper
return func(*args, **kwargs)
OSError: [Errno 27] File too large
Exception ignored in: <bound method _TemporaryFileCloser.__del__ of <tempfile._TemporaryFileCloser object at 0x7fdac08e44a8>>
Traceback (most recent call last):
File "/usr/lib/python3.5/tempfile.py", line 589, in __del__
self.close()
File "/usr/lib/python3.5/tempfile.py", line 582, in close
self.file.close()
OSError: [Errno 28] No space left on device
Exception ignored in: <bound method _TemporaryFileCloser.__del__ of <tempfile._TemporaryFileCloser object at 0x7fdab9fb7ac8>>
Traceback (most recent call last):
File "/usr/lib/python3.5/tempfile.py", line 589, in __del__
self.close()
File "/usr/lib/python3.5/tempfile.py", line 582, in close
self.file.close()
OSError: [Errno 27] File too large
elapsed time: 892 seconds
|
OSError
|
def dnf_autoremove():
"""Run 'dnf autoremove' and return size in bytes recovered"""
if os.path.exists("/var/run/dnf.pid"):
msg = (
_(
"%s cannot be cleaned because it is currently running. Close it, and try again."
)
% "Dnf"
)
raise RuntimeError(msg)
cmd = ["dnf", "-y", "autoremove"]
process = subprocess.Popen(cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
freed_bytes = 0
while True:
line = (
process.stdout.readline()
.decode(bleachbit.stdout_encoding)
.replace("\n", "")
)
if "Error: This command has to be run under the root user." == line:
raise RuntimeError("dnf autoremove >> requires root permissions")
if "Nothing to do." == line:
break
cregex = re.compile("Freed space: ([\d.]+[\s]+[BkMG])")
match = cregex.search(line)
if match:
freed_bytes = parseSize(match.group(1))
break
if "" == line and process.poll() != None:
break
logger.debug("dnf_autoremove >> total freed bytes: %s", freed_bytes)
return freed_bytes
|
def dnf_autoremove():
"""Run 'dnf autoremove' and return size in bytes recovered"""
if os.path.exists("/var/run/dnf.pid"):
msg = (
_(
"%s cannot be cleaned because it is currently running. Close it, and try again."
)
% "Dnf"
)
raise RuntimeError(msg)
cmd = ["dnf", "-y", "autoremove"]
process = subprocess.Popen(cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
freed_bytes = 0
while True:
line = process.stdout.readline().replace("\n", "")
if "Error: This command has to be run under the root user." == line:
raise RuntimeError("dnf autoremove >> requires root permissions")
if "Nothing to do." == line:
break
cregex = re.compile("Freed space: ([\d.]+[\s]+[BkMG])")
match = cregex.search(line)
if match:
freed_bytes = parseSize(match.group(1))
break
if "" == line and process.poll() != None:
break
logger.debug("dnf_autoremove >> total freed bytes: %s", freed_bytes)
return freed_bytes
|
https://github.com/bleachbit/bleachbit/issues/983
|
↪ sudo bleachbit
closest monitor (0) geometry = (0, 0)+(3840, 2160), window geometry = (1520, 766)+(1441, 978)
Error: dnf.autoremove: Function: dnf autoremove
Traceback (most recent call last):
File "/usr/share/bleachbit/Worker.py", line 87, in execute
for ret in cmd.execute(self.really_delete):
File "/usr/share/bleachbit/Command.py", line 142, in execute
func_ret = self.func()
File "/usr/share/bleachbit/Unix.py", line 717, in dnf_autoremove
line = process.stdout.readline().replace("\n", "")
TypeError: a bytes-like object is required, not 'str'
subprocess 18280 is still running
<exit code 0>
|
TypeError
|
def clean_json(path, target):
"""Delete key in the JSON file"""
import json
changed = False
targets = target.split("/")
# read file to parser
with open(path, "r", encoding="utf-8") as f:
js = json.load(f)
# change file
pos = js
while True:
new_target = targets.pop(0)
if not isinstance(pos, dict):
break
if new_target in pos and len(targets) > 0:
# descend
pos = pos[new_target]
elif new_target in pos:
# delete terminal target
changed = True
del pos[new_target]
else:
# target not found
break
if 0 == len(targets):
# target not found
break
if changed:
from bleachbit.Options import options
if options.get("shred"):
delete(path, True)
# write file
with open(path, "w", encoding="utf-8") as f:
json.dump(js, f)
|
def clean_json(path, target):
"""Delete key in the JSON file"""
import json
changed = False
targets = target.split("/")
# read file to parser
with open(path, "r") as f:
js = json.load(f)
# change file
pos = js
while True:
new_target = targets.pop(0)
if not isinstance(pos, dict):
break
if new_target in pos and len(targets) > 0:
# descend
pos = pos[new_target]
elif new_target in pos:
# delete terminal target
changed = True
del pos[new_target]
else:
# target not found
break
if 0 == len(targets):
# target not found
break
if changed:
from bleachbit.Options import options
if options.get("shred"):
delete(path, True)
# write file
with open(path, "w") as f:
json.dump(js, f)
|
https://github.com/bleachbit/bleachbit/issues/938
|
Błąd: google_chrome.cache: Command to clean JSON file, path=C:\Users\UserXXX\AppData\Local\Google\Chrome\User Data\Default\Preferences, address=dns_prefetching/host_referral_list
Traceback (most recent call last):
File "C:\projects\bleachbit\bleachbit\Worker.py", line 87, in execute
File "C:\projects\bleachbit\bleachbit\Command.py", line 244, in execute
File "C:\projects\bleachbit\bleachbit\FileUtilities.py", line 263, in clean_json
File "C:\Python34\lib\json\__init__.py", line 265, in load
File "C:\Python34\lib\encodings\cp1250.py", line 23, in decode
UnicodeDecodeError: 'charmap' codec can't decode byte 0x98 in position 28203: character maps to <undefined>
Błąd: google_chrome.cache: Command to clean JSON file, path=C:\Users\UserXXX\AppData\Local\Google\Chrome\User Data\Default\Preferences, address=dns_prefetching/startup_list
Traceback (most recent call last):
File "C:\projects\bleachbit\bleachbit\Worker.py", line 87, in execute
File "C:\projects\bleachbit\bleachbit\Command.py", line 244, in execute
File "C:\projects\bleachbit\bleachbit\FileUtilities.py", line 263, in clean_json
File "C:\Python34\lib\json\__init__.py", line 265, in load
File "C:\Python34\lib\encodings\cp1250.py", line 23, in decode
UnicodeDecodeError: 'charmap' codec can't decode byte 0x98 in position 28203: character maps to <undefined>
Błąd: google_chrome.cache: Command to clean JSON file, path=C:\Users\UserXXX\AppData\Local\Google\Chrome\User Data\Default\Preferences, address=net/http_server_properties/servers
Traceback (most recent call last):
File "C:\projects\bleachbit\bleachbit\Worker.py", line 87, in execute
File "C:\projects\bleachbit\bleachbit\Command.py", line 244, in execute
File "C:\projects\bleachbit\bleachbit\FileUtilities.py", line 263, in clean_json
File "C:\Python34\lib\json\__init__.py", line 265, in load
File "C:\Python34\lib\encodings\cp1250.py", line 23, in decode
UnicodeDecodeError: 'charmap' codec can't decode byte 0x98 in position 28203: character maps to <undefined>
|
UnicodeDecodeError
|
def delete_office_registrymodifications(path):
"""Erase LibreOffice 3.4 and Apache OpenOffice.org 3.4 MRU in registrymodifications.xcu"""
import xml.dom.minidom
dom1 = xml.dom.minidom.parse(path)
modified = False
for node in dom1.getElementsByTagName("item"):
if not node.hasAttribute("oor:path"):
continue
if not node.getAttribute("oor:path").startswith(
"/org.openoffice.Office.Histories/Histories/"
):
continue
node.parentNode.removeChild(node)
node.unlink()
modified = True
if modified:
with open(path, "w", encoding="utf-8") as xml_file:
dom1.writexml(xml_file)
|
def delete_office_registrymodifications(path):
"""Erase LibreOffice 3.4 and Apache OpenOffice.org 3.4 MRU in registrymodifications.xcu"""
import xml.dom.minidom
dom1 = xml.dom.minidom.parse(path)
modified = False
for node in dom1.getElementsByTagName("item"):
if not node.hasAttribute("oor:path"):
continue
if not node.getAttribute("oor:path").startswith(
"/org.openoffice.Office.Histories/Histories/"
):
continue
node.parentNode.removeChild(node)
node.unlink()
modified = True
if modified:
with open(path, "w") as xml_file:
dom1.writexml(xml_file)
|
https://github.com/bleachbit/bleachbit/issues/938
|
Błąd: google_chrome.cache: Command to clean JSON file, path=C:\Users\UserXXX\AppData\Local\Google\Chrome\User Data\Default\Preferences, address=dns_prefetching/host_referral_list
Traceback (most recent call last):
File "C:\projects\bleachbit\bleachbit\Worker.py", line 87, in execute
File "C:\projects\bleachbit\bleachbit\Command.py", line 244, in execute
File "C:\projects\bleachbit\bleachbit\FileUtilities.py", line 263, in clean_json
File "C:\Python34\lib\json\__init__.py", line 265, in load
File "C:\Python34\lib\encodings\cp1250.py", line 23, in decode
UnicodeDecodeError: 'charmap' codec can't decode byte 0x98 in position 28203: character maps to <undefined>
Błąd: google_chrome.cache: Command to clean JSON file, path=C:\Users\UserXXX\AppData\Local\Google\Chrome\User Data\Default\Preferences, address=dns_prefetching/startup_list
Traceback (most recent call last):
File "C:\projects\bleachbit\bleachbit\Worker.py", line 87, in execute
File "C:\projects\bleachbit\bleachbit\Command.py", line 244, in execute
File "C:\projects\bleachbit\bleachbit\FileUtilities.py", line 263, in clean_json
File "C:\Python34\lib\json\__init__.py", line 265, in load
File "C:\Python34\lib\encodings\cp1250.py", line 23, in decode
UnicodeDecodeError: 'charmap' codec can't decode byte 0x98 in position 28203: character maps to <undefined>
Błąd: google_chrome.cache: Command to clean JSON file, path=C:\Users\UserXXX\AppData\Local\Google\Chrome\User Data\Default\Preferences, address=net/http_server_properties/servers
Traceback (most recent call last):
File "C:\projects\bleachbit\bleachbit\Worker.py", line 87, in execute
File "C:\projects\bleachbit\bleachbit\Command.py", line 244, in execute
File "C:\projects\bleachbit\bleachbit\FileUtilities.py", line 263, in clean_json
File "C:\Python34\lib\json\__init__.py", line 265, in load
File "C:\Python34\lib\encodings\cp1250.py", line 23, in decode
UnicodeDecodeError: 'charmap' codec can't decode byte 0x98 in position 28203: character maps to <undefined>
|
UnicodeDecodeError
|
def delete_ooo_history(path):
"""Erase the OpenOffice.org MRU in Common.xcu. No longer valid in Apache OpenOffice.org 3.4."""
import xml.dom.minidom
dom1 = xml.dom.minidom.parse(path)
changed = False
for node in dom1.getElementsByTagName("node"):
if node.hasAttribute("oor:name"):
if "History" == node.getAttribute("oor:name"):
node.parentNode.removeChild(node)
node.unlink()
changed = True
break
if changed:
dom1.writexml(open(path, "w", encoding="utf-8"))
|
def delete_ooo_history(path):
"""Erase the OpenOffice.org MRU in Common.xcu. No longer valid in Apache OpenOffice.org 3.4."""
import xml.dom.minidom
dom1 = xml.dom.minidom.parse(path)
changed = False
for node in dom1.getElementsByTagName("node"):
if node.hasAttribute("oor:name"):
if "History" == node.getAttribute("oor:name"):
node.parentNode.removeChild(node)
node.unlink()
changed = True
break
if changed:
dom1.writexml(open(path, "w"))
|
https://github.com/bleachbit/bleachbit/issues/938
|
Błąd: google_chrome.cache: Command to clean JSON file, path=C:\Users\UserXXX\AppData\Local\Google\Chrome\User Data\Default\Preferences, address=dns_prefetching/host_referral_list
Traceback (most recent call last):
File "C:\projects\bleachbit\bleachbit\Worker.py", line 87, in execute
File "C:\projects\bleachbit\bleachbit\Command.py", line 244, in execute
File "C:\projects\bleachbit\bleachbit\FileUtilities.py", line 263, in clean_json
File "C:\Python34\lib\json\__init__.py", line 265, in load
File "C:\Python34\lib\encodings\cp1250.py", line 23, in decode
UnicodeDecodeError: 'charmap' codec can't decode byte 0x98 in position 28203: character maps to <undefined>
Błąd: google_chrome.cache: Command to clean JSON file, path=C:\Users\UserXXX\AppData\Local\Google\Chrome\User Data\Default\Preferences, address=dns_prefetching/startup_list
Traceback (most recent call last):
File "C:\projects\bleachbit\bleachbit\Worker.py", line 87, in execute
File "C:\projects\bleachbit\bleachbit\Command.py", line 244, in execute
File "C:\projects\bleachbit\bleachbit\FileUtilities.py", line 263, in clean_json
File "C:\Python34\lib\json\__init__.py", line 265, in load
File "C:\Python34\lib\encodings\cp1250.py", line 23, in decode
UnicodeDecodeError: 'charmap' codec can't decode byte 0x98 in position 28203: character maps to <undefined>
Błąd: google_chrome.cache: Command to clean JSON file, path=C:\Users\UserXXX\AppData\Local\Google\Chrome\User Data\Default\Preferences, address=net/http_server_properties/servers
Traceback (most recent call last):
File "C:\projects\bleachbit\bleachbit\Worker.py", line 87, in execute
File "C:\projects\bleachbit\bleachbit\Command.py", line 244, in execute
File "C:\projects\bleachbit\bleachbit\FileUtilities.py", line 263, in clean_json
File "C:\Python34\lib\json\__init__.py", line 265, in load
File "C:\Python34\lib\encodings\cp1250.py", line 23, in decode
UnicodeDecodeError: 'charmap' codec can't decode byte 0x98 in position 28203: character maps to <undefined>
|
UnicodeDecodeError
|
def get_chrome_bookmark_urls(path):
"""Return a list of bookmarked URLs in Google Chrome/Chromium"""
import json
# read file to parser
with open(path, "r", encoding="utf-8") as f:
js = json.load(f)
# empty list
urls = []
# local recursive function
def get_chrome_bookmark_urls_helper(node):
if not isinstance(node, dict):
return
if "type" not in node:
return
if node["type"] == "folder":
# folders have children
for child in node["children"]:
get_chrome_bookmark_urls_helper(child)
if node["type"] == "url" and "url" in node:
urls.append(node["url"])
# find bookmarks
for node in js["roots"]:
get_chrome_bookmark_urls_helper(js["roots"][node])
return list(set(urls)) # unique
|
def get_chrome_bookmark_urls(path):
"""Return a list of bookmarked URLs in Google Chrome/Chromium"""
import json
# read file to parser
with open(path, "r") as f:
js = json.load(f)
# empty list
urls = []
# local recursive function
def get_chrome_bookmark_urls_helper(node):
if not isinstance(node, dict):
return
if "type" not in node:
return
if node["type"] == "folder":
# folders have children
for child in node["children"]:
get_chrome_bookmark_urls_helper(child)
if node["type"] == "url" and "url" in node:
urls.append(node["url"])
# find bookmarks
for node in js["roots"]:
get_chrome_bookmark_urls_helper(js["roots"][node])
return list(set(urls)) # unique
|
https://github.com/bleachbit/bleachbit/issues/938
|
Błąd: google_chrome.cache: Command to clean JSON file, path=C:\Users\UserXXX\AppData\Local\Google\Chrome\User Data\Default\Preferences, address=dns_prefetching/host_referral_list
Traceback (most recent call last):
File "C:\projects\bleachbit\bleachbit\Worker.py", line 87, in execute
File "C:\projects\bleachbit\bleachbit\Command.py", line 244, in execute
File "C:\projects\bleachbit\bleachbit\FileUtilities.py", line 263, in clean_json
File "C:\Python34\lib\json\__init__.py", line 265, in load
File "C:\Python34\lib\encodings\cp1250.py", line 23, in decode
UnicodeDecodeError: 'charmap' codec can't decode byte 0x98 in position 28203: character maps to <undefined>
Błąd: google_chrome.cache: Command to clean JSON file, path=C:\Users\UserXXX\AppData\Local\Google\Chrome\User Data\Default\Preferences, address=dns_prefetching/startup_list
Traceback (most recent call last):
File "C:\projects\bleachbit\bleachbit\Worker.py", line 87, in execute
File "C:\projects\bleachbit\bleachbit\Command.py", line 244, in execute
File "C:\projects\bleachbit\bleachbit\FileUtilities.py", line 263, in clean_json
File "C:\Python34\lib\json\__init__.py", line 265, in load
File "C:\Python34\lib\encodings\cp1250.py", line 23, in decode
UnicodeDecodeError: 'charmap' codec can't decode byte 0x98 in position 28203: character maps to <undefined>
Błąd: google_chrome.cache: Command to clean JSON file, path=C:\Users\UserXXX\AppData\Local\Google\Chrome\User Data\Default\Preferences, address=net/http_server_properties/servers
Traceback (most recent call last):
File "C:\projects\bleachbit\bleachbit\Worker.py", line 87, in execute
File "C:\projects\bleachbit\bleachbit\Command.py", line 244, in execute
File "C:\projects\bleachbit\bleachbit\FileUtilities.py", line 263, in clean_json
File "C:\Python34\lib\json\__init__.py", line 265, in load
File "C:\Python34\lib\encodings\cp1250.py", line 23, in decode
UnicodeDecodeError: 'charmap' codec can't decode byte 0x98 in position 28203: character maps to <undefined>
|
UnicodeDecodeError
|
def clean_ini(path, section, parameter):
"""Delete sections and parameters (aka option) in the file"""
def write(parser, ini_file):
"""
Reimplementation of the original RowConfigParser write function.
This function is 99% same as its origin. The only change is
removing a cast to str. This is needed to handle unicode chars.
"""
if parser._defaults:
ini_file.write("[%s]\n" % "DEFAULT")
for key, value in parser._defaults.items():
ini_file.write("%s = %s\n" % (key, str(value).replace("\n", "\n\t")))
ini_file.write("\n")
for section in parser._sections:
ini_file.write("[%s]\n" % section)
for key, value in parser._sections[section].items():
if key == "__name__":
continue
if (value is not None) or (parser._optcre == parser.OPTCRE):
# The line bellow is the only changed line of the original function.
# This is the orignal line for reference:
# key = " = ".join((key, str(value).replace('\n', '\n\t')))
key = " = ".join((key, value.replace("\n", "\n\t")))
ini_file.write("%s\n" % (key))
ini_file.write("\n")
encoding = detect_encoding(path) or "utf_8_sig"
# read file to parser
config = bleachbit.RawConfigParser()
config.optionxform = lambda option: option
config.write = write
with open(path, "r", encoding=encoding) as fp:
config.read_file(fp)
# change file
changed = False
if config.has_section(section):
if parameter is None:
changed = True
config.remove_section(section)
elif config.has_option(section, parameter):
changed = True
config.remove_option(section, parameter)
# write file
if changed:
from bleachbit.Options import options
fp.close()
if options.get("shred"):
delete(path, True)
with open(path, "w", encoding=encoding, newline="") as fp:
config.write(config, fp)
|
def clean_ini(path, section, parameter):
"""Delete sections and parameters (aka option) in the file"""
def write(parser, ini_file):
"""
Reimplementation of the original RowConfigParser write function.
This function is 99% same as its origin. The only change is
removing a cast to str. This is needed to handle unicode chars.
"""
if parser._defaults:
ini_file.write("[%s]\n" % "DEFAULT")
for key, value in parser._defaults.items():
ini_file.write("%s = %s\n" % (key, str(value).replace("\n", "\n\t")))
ini_file.write("\n")
for section in parser._sections:
ini_file.write("[%s]\n" % section)
for key, value in parser._sections[section].items():
if key == "__name__":
continue
if (value is not None) or (parser._optcre == parser.OPTCRE):
# The line bellow is the only changed line of the original function.
# This is the orignal line for reference:
# key = " = ".join((key, str(value).replace('\n', '\n\t')))
key = " = ".join((key, value.replace("\n", "\n\t")))
ini_file.write("%s\n" % (key))
ini_file.write("\n")
encoding = detect_encoding(path) or "utf_8_sig"
# read file to parser
config = bleachbit.RawConfigParser()
config.write = write
with open(path, "r", encoding=encoding) as fp:
config.read_file(fp)
# change file
changed = False
if config.has_section(section):
if parameter is None:
changed = True
config.remove_section(section)
elif config.has_option(section, parameter):
changed = True
config.remove_option(section, parameter)
# write file
if changed:
from bleachbit.Options import options
fp.close()
if options.get("shred"):
delete(path, True)
with open(path, "w", encoding=encoding, newline="") as fp:
config.write(config, fp)
|
https://github.com/bleachbit/bleachbit/issues/812
|
clean_operation('vlc'), options = '['mru']'
Error: vlc.mru: Command to clean .ini path=/home/dzmitry/.config/vlc/vlc-qt-interface.conf, section=General, parameter=filedialog-path
Traceback (most recent call last):
File "/home/dzmitry/Downloads/bleachbit-master/bleachbit/Worker.py", line 87, in execute
for ret in cmd.execute(self.really_delete):
File "/home/dzmitry/Downloads/bleachbit-master/bleachbit/Command.py", line 210, in execute
FileUtilities.clean_ini(self.path, self.section, self.parameter)
File "/home/dzmitry/Downloads/bleachbit-master/bleachbit/FileUtilities.py", line 232, in clean_ini
config.read_file(fp)
File "/usr/lib/python3.5/configparser.py", line 715, in read_file
self._read(f, source)
File "/usr/lib/python3.5/configparser.py", line 1089, in _read
fpname, lineno)
configparser.DuplicateOptionError: While reading from '/home/dzmitry/.config/vlc/vlc-qt-interface.conf' [line 16]: option 'bgsize' in section 'MainWindow' already exists
Error: vlc.mru: Command to clean .ini path=/home/dzmitry/.config/vlc/vlc-qt-interface.conf, section=RecentsMRL, parameter=None
Traceback (most recent call last):
File "/home/dzmitry/Downloads/bleachbit-master/bleachbit/Worker.py", line 87, in execute
for ret in cmd.execute(self.really_delete):
File "/home/dzmitry/Downloads/bleachbit-master/bleachbit/Command.py", line 210, in execute
FileUtilities.clean_ini(self.path, self.section, self.parameter)
File "/home/dzmitry/Downloads/bleachbit-master/bleachbit/FileUtilities.py", line 232, in clean_ini
config.read_file(fp)
File "/usr/lib/python3.5/configparser.py", line 715, in read_file
self._read(f, source)
File "/usr/lib/python3.5/configparser.py", line 1089, in _read
fpname, lineno)
configparser.DuplicateOptionError: While reading from '/home/dzmitry/.config/vlc/vlc-qt-interface.conf' [line 16]: option 'bgsize' in section 'MainWindow' already exists
Error: vlc.mru: Command to clean .ini path=/home/dzmitry/.config/vlc/vlc-qt-interface.conf, section=OpenDialog, parameter=netMRL
Traceback (most recent call last):
File "/home/dzmitry/Downloads/bleachbit-master/bleachbit/Worker.py", line 87, in execute
for ret in cmd.execute(self.really_delete):
File "/home/dzmitry/Downloads/bleachbit-master/bleachbit/Command.py", line 210, in execute
FileUtilities.clean_ini(self.path, self.section, self.parameter)
File "/home/dzmitry/Downloads/bleachbit-master/bleachbit/FileUtilities.py", line 232, in clean_ini
config.read_file(fp)
File "/usr/lib/python3.5/configparser.py", line 715, in read_file
self._read(f, source)
File "/usr/lib/python3.5/configparser.py", line 1089, in _read
fpname, lineno)
configparser.DuplicateOptionError: While reading from '/home/dzmitry/.config/vlc/vlc-qt-interface.conf' [line 16]: option 'bgsize' in section 'MainWindow' already exists
Disk space recovered: 0
Files deleted: 0
Errors: 3
elapsed time: 0 seconds
|
configparser.DuplicateOptionError
|
def get_subpaths(self, basepath):
"""Returns direct subpaths for this object, i.e. either the named subfolder or all
subfolders matching the pattern"""
if isinstance(self.pattern, Pattern):
return (
os.path.join(basepath, p)
for p in os.listdir(basepath)
if self.pattern.match(p) and os.path.isdir(os.path.join(basepath, p))
)
else:
path = os.path.join(basepath, self.pattern)
return [path] if os.path.isdir(path) else []
|
def get_subpaths(self, basepath):
"""Returns direct subpaths for this object, i.e. either the named subfolder or all
subfolders matching the pattern"""
if isinstance(self.pattern, re._pattern_type):
return (
os.path.join(basepath, p)
for p in os.listdir(basepath)
if self.pattern.match(p) and os.path.isdir(os.path.join(basepath, p))
)
else:
path = os.path.join(basepath, self.pattern)
return [path] if os.path.isdir(path) else []
|
https://github.com/bleachbit/bleachbit/issues/759
|
======================================================================
FAIL: test_whitelisted_posix_symlink (tests.TestFileUtilities.FileUtilitiesTestCase)
Symlink test for whitelisted_posix()
----------------------------------------------------------------------
Traceback (most recent call last):
File "/tmp/bleachbit/tests/common.py", line 160, in wrapper
return f(*args, **kwargs)
File "/tmp/bleachbit/tests/TestFileUtilities.py", line 792, in test_whitelisted_posix_symlink
realpath = self.write_file('real')
File "/tmp/bleachbit/tests/common.py", line 116, in write_file
assert (os.path.exists(extended_path(filename)))
AssertionError
======================================================================
FAIL: test_wipe_contents (tests.TestFileUtilities.FileUtilitiesTestCase)
Unit test for wipe_delete()
----------------------------------------------------------------------
Traceback (most recent call last):
File "/tmp/bleachbit/tests/TestFileUtilities.py", line 844, in test_wipe_contents
filename = self.write_file('bleachbit-test-wipe', b'abcdefghij' * 12345)
File "/tmp/bleachbit/tests/common.py", line 116, in write_file
assert (os.path.exists(extended_path(filename)))
AssertionError
======================================================================
FAIL: test_wipe_name (tests.TestFileUtilities.FileUtilitiesTestCase)
Unit test for wipe_name()
----------------------------------------------------------------------
Traceback (most recent call last):
File "/tmp/bleachbit/tests/TestFileUtilities.py", line 880, in test_wipe_name
filename = self.write_file('bleachbit-test-wipe' + '0' * 50)
File "/tmp/bleachbit/tests/common.py", line 116, in write_file
assert (os.path.exists(extended_path(filename)))
AssertionError
======================================================================
FAIL: test_fakelocalizationdirs (tests.TestUnix.UnixTestCase)
Create a faked localization hierarchy and clean it afterwards
----------------------------------------------------------------------
Traceback (most recent call last):
File "/tmp/bleachbit/tests/common.py", line 160, in wrapper
return f(*args, **kwargs)
File "/tmp/bleachbit/tests/TestUnix.py", line 185, in test_fakelocalizationdirs
self.write_file(path)
File "/tmp/bleachbit/tests/common.py", line 116, in write_file
assert (os.path.exists(extended_path(filename)))
AssertionError
======================================================================
FAIL: test_is_running (tests.TestUnix.UnixTestCase)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/tmp/bleachbit/tests/common.py", line 160, in wrapper
return f(*args, **kwargs)
File "/tmp/bleachbit/tests/TestUnix.py", line 105, in test_is_running
self.assertTrue(is_running(exe))
AssertionError: False is not true
======================================================================
FAIL: test_many_patterns (tests.TestWinapp.WinappTestCase)
Test a cleaner like Steam Installers and related performance improvement
----------------------------------------------------------------------
Traceback (most recent call last):
File "/tmp/bleachbit/tests/TestWinapp.py", line 439, in test_many_patterns
common.touch_file(tmp_fn)
File "/tmp/bleachbit/tests/common.py", line 182, in touch_file
assert(os.path.exists(filename))
AssertionError
----------------------------------------------------------------------
Ran 167 tests in 74.622s
FAILED (failures=21, errors=39, skipped=26)
make: *** [Makefile:107: tests] Error 1
|
AssertionError
|
def get_localizations(self, basepath):
"""Returns all localization items for this object and all descendant objects"""
for path in self.get_subpaths(basepath):
for child in self.children:
if isinstance(child, LocaleCleanerPath):
for res in child.get_localizations(path):
yield res
elif isinstance(child, Pattern):
for element in os.listdir(path):
match = child.match(element)
if match is not None:
yield (
match.group("locale"),
match.group("specifier"),
os.path.join(path, element),
)
|
def get_localizations(self, basepath):
"""Returns all localization items for this object and all descendant objects"""
for path in self.get_subpaths(basepath):
for child in self.children:
if isinstance(child, LocaleCleanerPath):
for res in child.get_localizations(path):
yield res
elif isinstance(child, re._pattern_type):
for element in os.listdir(path):
match = child.match(element)
if match is not None:
yield (
match.group("locale"),
match.group("specifier"),
os.path.join(path, element),
)
|
https://github.com/bleachbit/bleachbit/issues/759
|
======================================================================
FAIL: test_whitelisted_posix_symlink (tests.TestFileUtilities.FileUtilitiesTestCase)
Symlink test for whitelisted_posix()
----------------------------------------------------------------------
Traceback (most recent call last):
File "/tmp/bleachbit/tests/common.py", line 160, in wrapper
return f(*args, **kwargs)
File "/tmp/bleachbit/tests/TestFileUtilities.py", line 792, in test_whitelisted_posix_symlink
realpath = self.write_file('real')
File "/tmp/bleachbit/tests/common.py", line 116, in write_file
assert (os.path.exists(extended_path(filename)))
AssertionError
======================================================================
FAIL: test_wipe_contents (tests.TestFileUtilities.FileUtilitiesTestCase)
Unit test for wipe_delete()
----------------------------------------------------------------------
Traceback (most recent call last):
File "/tmp/bleachbit/tests/TestFileUtilities.py", line 844, in test_wipe_contents
filename = self.write_file('bleachbit-test-wipe', b'abcdefghij' * 12345)
File "/tmp/bleachbit/tests/common.py", line 116, in write_file
assert (os.path.exists(extended_path(filename)))
AssertionError
======================================================================
FAIL: test_wipe_name (tests.TestFileUtilities.FileUtilitiesTestCase)
Unit test for wipe_name()
----------------------------------------------------------------------
Traceback (most recent call last):
File "/tmp/bleachbit/tests/TestFileUtilities.py", line 880, in test_wipe_name
filename = self.write_file('bleachbit-test-wipe' + '0' * 50)
File "/tmp/bleachbit/tests/common.py", line 116, in write_file
assert (os.path.exists(extended_path(filename)))
AssertionError
======================================================================
FAIL: test_fakelocalizationdirs (tests.TestUnix.UnixTestCase)
Create a faked localization hierarchy and clean it afterwards
----------------------------------------------------------------------
Traceback (most recent call last):
File "/tmp/bleachbit/tests/common.py", line 160, in wrapper
return f(*args, **kwargs)
File "/tmp/bleachbit/tests/TestUnix.py", line 185, in test_fakelocalizationdirs
self.write_file(path)
File "/tmp/bleachbit/tests/common.py", line 116, in write_file
assert (os.path.exists(extended_path(filename)))
AssertionError
======================================================================
FAIL: test_is_running (tests.TestUnix.UnixTestCase)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/tmp/bleachbit/tests/common.py", line 160, in wrapper
return f(*args, **kwargs)
File "/tmp/bleachbit/tests/TestUnix.py", line 105, in test_is_running
self.assertTrue(is_running(exe))
AssertionError: False is not true
======================================================================
FAIL: test_many_patterns (tests.TestWinapp.WinappTestCase)
Test a cleaner like Steam Installers and related performance improvement
----------------------------------------------------------------------
Traceback (most recent call last):
File "/tmp/bleachbit/tests/TestWinapp.py", line 439, in test_many_patterns
common.touch_file(tmp_fn)
File "/tmp/bleachbit/tests/common.py", line 182, in touch_file
assert(os.path.exists(filename))
AssertionError
----------------------------------------------------------------------
Ran 167 tests in 74.622s
FAILED (failures=21, errors=39, skipped=26)
make: *** [Makefile:107: tests] Error 1
|
AssertionError
|
def __shred_sqlite_char_columns(table, cols=None, where=""):
"""Create an SQL command to shred character columns"""
cmd = ""
if not where:
# If None, set to empty string.
where = ""
if cols and options.get("shred"):
cmd += "update or ignore %s set %s %s;" % (
table,
",".join(["%s = randomblob(length(%s))" % (col, col) for col in cols]),
where,
)
cmd += "update or ignore %s set %s %s;" % (
table,
",".join(["%s = zeroblob(length(%s))" % (col, col) for col in cols]),
where,
)
cmd += "delete from %s %s;" % (table, where)
return cmd
|
def __shred_sqlite_char_columns(table, cols=None, where=""):
"""Create an SQL command to shred character columns"""
cmd = ""
if cols and options.get("shred"):
cmd += "update or ignore %s set %s %s;" % (
table,
",".join(["%s = randomblob(length(%s))" % (col, col) for col in cols]),
where,
)
cmd += "update or ignore %s set %s %s;" % (
table,
",".join(["%s = zeroblob(length(%s))" % (col, col) for col in cols]),
where,
)
cmd += "delete from %s %s;" % (table, where)
return cmd
|
https://github.com/bleachbit/bleachbit/issues/744
|
Error: google_chrome.history: Function: Clean file: C:\Users\<redacted>\AppData\Local\Google\Chrome\User Data\Default\History
Traceback (most recent call last):
File "bleachbit\Worker.pyo", line 88, in execute
File "bleachbit\Command.pyo", line 162, in execute
File "bleachbit\Special.pyo", line 199, in delete_chrome_history
File "bleachbit\FileUtilities.pyo", line 406, in execute_sqlite3
OperationalError: no such column: lower_term: C:\Users\<redacted>\AppData\Local\Google\Chrome\User Data\Default\History
ERROR:bleachbit.Worker:Error: google_chrome.history: Function: Clean file: C:\Users\<redacted>\AppData\Local\Google\Chrome\User Data\Default\History
Traceback (most recent call last):
File "bleachbit\Worker.pyo", line 88, in execute
File "bleachbit\Command.pyo", line 162, in execute
File "bleachbit\Special.pyo", line 199, in delete_chrome_history
File "bleachbit\FileUtilities.pyo", line 406, in execute_sqlite3
OperationalError: no such column: lower_term: C:\Users\<redacted>\AppData\Local\Google\Chrome\User Data\Default\History
|
OperationalError
|
def delete_chrome_favicons(path):
"""Delete Google Chrome and Chromium favicons not use in in history for bookmarks"""
path_history = os.path.join(os.path.dirname(path), "History")
if os.path.exists(path_history):
ver = __get_chrome_history(path)
else:
# assume it's the newer version
ver = 38
cmds = ""
if ver >= 4:
# Version 4 includes Chromium 12
# Version 20 includes Chromium 14, Google Chrome 15, Google Chrome 19
# Version 22 includes Google Chrome 20
# Version 25 is Google Chrome 26
# Version 26 is Google Chrome 29
# Version 28 is Google Chrome 30
# Version 29 is Google Chrome 37
# Version 32 is Google Chrome 51
# Version 36 is Google Chrome 60
# Version 38 is Google Chrome 64
# Version 42 is Google Chrome 79
# icon_mapping
cols = ("page_url",)
where = None
if os.path.exists(path_history):
cmds += 'attach database "%s" as History;' % path_history
where = "where page_url not in (select distinct url from History.urls)"
cmds += __shred_sqlite_char_columns("icon_mapping", cols, where)
# favicon images
cols = ("image_data",)
where = "where icon_id not in (select distinct icon_id from icon_mapping)"
cmds += __shred_sqlite_char_columns("favicon_bitmaps", cols, where)
# favicons
# Google Chrome 30 (database version 28): image_data moved to table
# favicon_bitmaps
if ver < 28:
cols = ("url", "image_data")
else:
cols = ("url",)
where = "where id not in (select distinct icon_id from icon_mapping)"
cmds += __shred_sqlite_char_columns("favicons", cols, where)
elif 3 == ver:
# Version 3 includes Google Chrome 11
cols = ("url", "image_data")
where = None
if os.path.exists(path_history):
cmds += 'attach database "%s" as History;' % path_history
where = "where id not in(select distinct favicon_id from History.urls)"
cmds += __shred_sqlite_char_columns("favicons", cols, where)
else:
raise RuntimeError("%s is version %d" % (path, ver))
FileUtilities.execute_sqlite3(path, cmds)
|
def delete_chrome_favicons(path):
"""Delete Google Chrome and Chromium favicons not use in in history for bookmarks"""
path_history = os.path.join(os.path.dirname(path), "History")
ver = __get_chrome_history(path)
cmds = ""
if ver >= 4:
# Version 4 includes Chromium 12
# Version 20 includes Chromium 14, Google Chrome 15, Google Chrome 19
# Version 22 includes Google Chrome 20
# Version 25 is Google Chrome 26
# Version 26 is Google Chrome 29
# Version 28 is Google Chrome 30
# Version 29 is Google Chrome 37
# Version 32 is Google Chrome 51
# Version 36 is Google Chrome 60
# Version 38 is Google Chrome 64
# icon_mapping
cols = ("page_url",)
where = None
if os.path.exists(path_history):
cmds += 'attach database "%s" as History;' % path_history
where = "where page_url not in (select distinct url from History.urls)"
cmds += __shred_sqlite_char_columns("icon_mapping", cols, where)
# favicon images
cols = ("image_data",)
where = "where icon_id not in (select distinct icon_id from icon_mapping)"
cmds += __shred_sqlite_char_columns("favicon_bitmaps", cols, where)
# favicons
# Google Chrome 30 (database version 28): image_data moved to table
# favicon_bitmaps
if ver < 28:
cols = ("url", "image_data")
else:
cols = ("url",)
where = "where id not in (select distinct icon_id from icon_mapping)"
cmds += __shred_sqlite_char_columns("favicons", cols, where)
elif 3 == ver:
# Version 3 includes Google Chrome 11
cols = ("url", "image_data")
where = None
if os.path.exists(path_history):
cmds += 'attach database "%s" as History;' % path_history
where = "where id not in(select distinct favicon_id from History.urls)"
cmds += __shred_sqlite_char_columns("favicons", cols, where)
else:
raise RuntimeError("%s is version %d" % (path, ver))
FileUtilities.execute_sqlite3(path, cmds)
|
https://github.com/bleachbit/bleachbit/issues/744
|
Error: google_chrome.history: Function: Clean file: C:\Users\<redacted>\AppData\Local\Google\Chrome\User Data\Default\History
Traceback (most recent call last):
File "bleachbit\Worker.pyo", line 88, in execute
File "bleachbit\Command.pyo", line 162, in execute
File "bleachbit\Special.pyo", line 199, in delete_chrome_history
File "bleachbit\FileUtilities.pyo", line 406, in execute_sqlite3
OperationalError: no such column: lower_term: C:\Users\<redacted>\AppData\Local\Google\Chrome\User Data\Default\History
ERROR:bleachbit.Worker:Error: google_chrome.history: Function: Clean file: C:\Users\<redacted>\AppData\Local\Google\Chrome\User Data\Default\History
Traceback (most recent call last):
File "bleachbit\Worker.pyo", line 88, in execute
File "bleachbit\Command.pyo", line 162, in execute
File "bleachbit\Special.pyo", line 199, in delete_chrome_history
File "bleachbit\FileUtilities.pyo", line 406, in execute_sqlite3
OperationalError: no such column: lower_term: C:\Users\<redacted>\AppData\Local\Google\Chrome\User Data\Default\History
|
OperationalError
|
def delete_chrome_history(path):
"""Clean history from History and Favicon files without affecting bookmarks"""
if not os.path.exists(path):
logger.debug(
"aborting delete_chrome_history() because history does not exist: %s" % path
)
return
cols = ("url", "title")
where = ""
ids_int = get_chrome_bookmark_ids(path)
if ids_int:
ids_str = ",".join([str(id0) for id0 in ids_int])
where = "where id not in (%s) " % ids_str
cmds = __shred_sqlite_char_columns("urls", cols, where)
cmds += __shred_sqlite_char_columns("visits")
# Google Chrome 79 no longer has lower_term in keyword_search_terms
cols = ("term",)
cmds += __shred_sqlite_char_columns("keyword_search_terms", cols)
ver = __get_chrome_history(path)
if ver >= 20:
# downloads, segments, segment_usage first seen in Chrome 14,
# Google Chrome 15 (database version = 20).
# Google Chrome 30 (database version 28) doesn't have full_path, but it
# does have current_path and target_path
if ver >= 28:
cmds += __shred_sqlite_char_columns(
"downloads", ("current_path", "target_path")
)
cmds += __shred_sqlite_char_columns("downloads_url_chains", ("url",))
else:
cmds += __shred_sqlite_char_columns("downloads", ("full_path", "url"))
cmds += __shred_sqlite_char_columns("segments", ("name",))
cmds += __shred_sqlite_char_columns("segment_usage")
FileUtilities.execute_sqlite3(path, cmds)
|
def delete_chrome_history(path):
"""Clean history from History and Favicon files without affecting bookmarks"""
cols = ("url", "title")
where = ""
ids_int = get_chrome_bookmark_ids(path)
if ids_int:
ids_str = ",".join([str(id0) for id0 in ids_int])
where = "where id not in (%s) " % ids_str
cmds = __shred_sqlite_char_columns("urls", cols, where)
cmds += __shred_sqlite_char_columns("visits")
# Google Chrome 79 no longer has lower_term in keyword_search_terms
cols = ("term",)
cmds += __shred_sqlite_char_columns("keyword_search_terms", cols)
ver = __get_chrome_history(path)
if ver >= 20:
# downloads, segments, segment_usage first seen in Chrome 14,
# Google Chrome 15 (database version = 20).
# Google Chrome 30 (database version 28) doesn't have full_path, but it
# does have current_path and target_path
if ver >= 28:
cmds += __shred_sqlite_char_columns(
"downloads", ("current_path", "target_path")
)
cmds += __shred_sqlite_char_columns("downloads_url_chains", ("url",))
else:
cmds += __shred_sqlite_char_columns("downloads", ("full_path", "url"))
cmds += __shred_sqlite_char_columns("segments", ("name",))
cmds += __shred_sqlite_char_columns("segment_usage")
FileUtilities.execute_sqlite3(path, cmds)
|
https://github.com/bleachbit/bleachbit/issues/744
|
Error: google_chrome.history: Function: Clean file: C:\Users\<redacted>\AppData\Local\Google\Chrome\User Data\Default\History
Traceback (most recent call last):
File "bleachbit\Worker.pyo", line 88, in execute
File "bleachbit\Command.pyo", line 162, in execute
File "bleachbit\Special.pyo", line 199, in delete_chrome_history
File "bleachbit\FileUtilities.pyo", line 406, in execute_sqlite3
OperationalError: no such column: lower_term: C:\Users\<redacted>\AppData\Local\Google\Chrome\User Data\Default\History
ERROR:bleachbit.Worker:Error: google_chrome.history: Function: Clean file: C:\Users\<redacted>\AppData\Local\Google\Chrome\User Data\Default\History
Traceback (most recent call last):
File "bleachbit\Worker.pyo", line 88, in execute
File "bleachbit\Command.pyo", line 162, in execute
File "bleachbit\Special.pyo", line 199, in delete_chrome_history
File "bleachbit\FileUtilities.pyo", line 406, in execute_sqlite3
OperationalError: no such column: lower_term: C:\Users\<redacted>\AppData\Local\Google\Chrome\User Data\Default\History
|
OperationalError
|
def download_url_to_fn(url, fn, on_error=None, max_retries=2, backoff_factor=0.5):
"""Download a URL to the given filename"""
logger.info("Downloading %s to %s", url, fn)
import requests
import sys
if hasattr(sys, "frozen"):
# when frozen by py2exe, certificates are in alternate location
CA_BUNDLE = os.path.join(bleachbit_exe_path, "cacert.pem")
requests.utils.DEFAULT_CA_BUNDLE_PATH = CA_BUNDLE
requests.adapters.DEFAULT_CA_BUNDLE_PATH = CA_BUNDLE
from urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter
session = requests.Session()
# 408: request timeout
# 429: too many requests
# 500: internal server error
# 502: bad gateway
# 503: service unavailable
# 504: gateway_timeout
status_forcelist = (408, 429, 500, 502, 503, 504)
# sourceforge.net directories to download mirror
retries = Retry(
total=max_retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist,
redirect=5,
)
session.mount("http://", HTTPAdapter(max_retries=retries))
msg = _("Downloading url failed: %s") % url
from bleachbit.Update import user_agent
headers = {"user_agent": user_agent()}
def do_error(msg2):
if on_error:
on_error(msg, msg2)
from bleachbit.FileUtilities import delete
delete(fn, ignore_missing=True) # delete any partial download
try:
response = session.get(url, headers=headers)
content = response.content
except requests.exceptions.RequestException as exc:
msg2 = "{}: {}".format(type(exc).__name__, exc)
logger.exception(msg)
do_error(msg2)
return False
else:
if not response.status_code == 200:
logger.error(msg)
msg2 = "Status code: %s" % response.status_code
do_error(msg2)
return False
with open(fn, "wb") as f:
f.write(content)
return True
|
def download_url_to_fn(url, fn, on_error=None, max_retries=2, backoff_factor=0.5):
"""Download a URL to the given filename"""
logger.info("Downloading %s to %s", url, fn)
import requests
from urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter
session = requests.Session()
# 408: request timeout
# 429: too many requests
# 500: internal server error
# 502: bad gateway
# 503: service unavailable
# 504: gateway_timeout
status_forcelist = (408, 429, 500, 502, 503, 504)
# sourceforge.net directories to download mirror
retries = Retry(
total=max_retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist,
redirect=5,
)
session.mount("http://", HTTPAdapter(max_retries=retries))
msg = _("Downloading url failed: %s") % url
from bleachbit.Update import user_agent
headers = {"user_agent": user_agent()}
def do_error(msg2):
if on_error:
on_error(msg, msg2)
from bleachbit.FileUtilities import delete
delete(fn, ignore_missing=True) # delete any partial download
try:
response = session.get(url, headers=headers)
content = response.content
except requests.exceptions.RequestException as exc:
msg2 = "{}: {}".format(type(exc).__name__, exc)
logger.exception(msg)
do_error(msg2)
return False
else:
if not response.status_code == 200:
logger.error(msg)
msg2 = "Status code: %s" % response.status_code
do_error(msg2)
return False
with open(fn, "wb") as f:
f.write(content)
return True
|
https://github.com/bleachbit/bleachbit/issues/643
|
Downloading https://sourceforge.net/projects/bleachbit/files/chaff/clinton_subject_model.json.bz2/download to C:\temp\sig\BleachBit-2.3-portable\BleachBit-Portable\clinton_subject_model.json.bz2
Traceback (most recent call last):
File "bleachbit\GuiChaff.pyo", line 160, in on_make_files
File "bleachbit\GuiChaff.pyo", line 140, in download_models_dialog
File "bleachbit\GuiChaff.pyo", line 130, in download_models_gui
File "bleachbit\Chaff.pyo", line 173, in download_models
File "bleachbit\Chaff.pyo", line 112, in download_url_to_fn
ImportError: No module named requests
|
ImportError
|
def build():
"""Build the application"""
logger.info("Deleting directories build and dist")
shutil.rmtree("build", ignore_errors=True)
shutil.rmtree("dist", ignore_errors=True)
shutil.rmtree("BleachBit-Portable", ignore_errors=True)
logger.info("Running py2exe")
shutil.copyfile("bleachbit.py", "bleachbit_console.py")
cmd = sys.executable + " -OO setup.py py2exe"
run_cmd(cmd)
assert_exist("dist\\bleachbit.exe")
assert_exist("dist\\bleachbit_console.exe")
os.remove("bleachbit_console.py")
if not os.path.exists("dist"):
os.makedirs("dist")
logger.info("Copying GTK files and icon")
copytree(GTK_DIR + "\\etc", "dist\\etc")
copytree(GTK_DIR + "\\lib", "dist\\lib")
for subpath in ["fontconfig", "fonts", "icons", "themes"]:
copytree(os.path.join(GTK_DIR, "share", subpath), "dist\\share\\" + subpath)
SCHEMAS_DIR = "share\\glib-2.0\\schemas"
os.makedirs(os.path.join("dist", SCHEMAS_DIR))
shutil.copyfile(
os.path.join(GTK_DIR, SCHEMAS_DIR, "gschemas.compiled"),
os.path.join("dist", SCHEMAS_DIR, "gschemas.compiled"),
)
shutil.copyfile("bleachbit.png", "dist\\share\\bleachbit.png")
for dll in glob.glob1(GTK_DIR, "*.dll"):
shutil.copyfile(os.path.join(GTK_DIR, dll), "dist\\" + dll)
os.mkdir("dist\\data")
shutil.copyfile("data\\app-menu.ui", "dist\\data\\app-menu.ui")
logger.info("Copying CA bundle")
import requests
shutil.copyfile(
requests.utils.DEFAULT_CA_BUNDLE_PATH, os.path.join("dist", "cacert.pem")
)
logger.info("Copying BleachBit localizations")
shutil.rmtree("dist\\share\\locale", ignore_errors=True)
copytree("locale", "dist\\share\\locale")
assert_exist("dist\\share\\locale\\es\\LC_MESSAGES\\bleachbit.mo")
logger.info("Copying BleachBit cleaners")
if not os.path.exists("dist\\share\\cleaners"):
os.makedirs("dist\\share\\cleaners")
cleaners_files = recursive_glob("cleaners", ["*.xml"])
for file in cleaners_files:
shutil.copy(file, "dist\\share\\cleaners")
logger.info("Checking for CleanerML")
assert_exist("dist\\share\\cleaners\\internet_explorer.xml")
logger.info("Copying license")
shutil.copy("COPYING", "dist")
sign_code("dist\\bleachbit.exe")
sign_code("dist\\bleachbit_console.exe")
assert_execute_console()
|
def build():
"""Build the application"""
logger.info("Deleting directories build and dist")
shutil.rmtree("build", ignore_errors=True)
shutil.rmtree("dist", ignore_errors=True)
shutil.rmtree("BleachBit-Portable", ignore_errors=True)
logger.info("Running py2exe")
shutil.copyfile("bleachbit.py", "bleachbit_console.py")
cmd = sys.executable + " -OO setup.py py2exe"
run_cmd(cmd)
assert_exist("dist\\bleachbit.exe")
assert_exist("dist\\bleachbit_console.exe")
os.remove("bleachbit_console.py")
if not os.path.exists("dist"):
os.makedirs("dist")
logger.info("Copying GTK files and icon")
copytree(GTK_DIR + "\\etc", "dist\\etc")
copytree(GTK_DIR + "\\lib", "dist\\lib")
for subpath in ["fontconfig", "fonts", "icons", "themes"]:
copytree(os.path.join(GTK_DIR, "share", subpath), "dist\\share\\" + subpath)
SCHEMAS_DIR = "share\\glib-2.0\\schemas"
os.makedirs(os.path.join("dist", SCHEMAS_DIR))
shutil.copyfile(
os.path.join(GTK_DIR, SCHEMAS_DIR, "gschemas.compiled"),
os.path.join("dist", SCHEMAS_DIR, "gschemas.compiled"),
)
shutil.copyfile("bleachbit.png", "dist\\share\\bleachbit.png")
for dll in glob.glob1(GTK_DIR, "*.dll"):
shutil.copyfile(os.path.join(GTK_DIR, dll), "dist\\" + dll)
os.mkdir("dist\\data")
shutil.copyfile("data\\app-menu.ui", "dist\\data\\app-menu.ui")
logger.info("Copying BleachBit localizations")
shutil.rmtree("dist\\share\\locale", ignore_errors=True)
copytree("locale", "dist\\share\\locale")
assert_exist("dist\\share\\locale\\es\\LC_MESSAGES\\bleachbit.mo")
logger.info("Copying BleachBit cleaners")
if not os.path.exists("dist\\share\\cleaners"):
os.makedirs("dist\\share\\cleaners")
cleaners_files = recursive_glob("cleaners", ["*.xml"])
for file in cleaners_files:
shutil.copy(file, "dist\\share\\cleaners")
logger.info("Checking for CleanerML")
assert_exist("dist\\share\\cleaners\\internet_explorer.xml")
logger.info("Copying license")
shutil.copy("COPYING", "dist")
sign_code("dist\\bleachbit.exe")
sign_code("dist\\bleachbit_console.exe")
assert_execute_console()
|
https://github.com/bleachbit/bleachbit/issues/643
|
Downloading https://sourceforge.net/projects/bleachbit/files/chaff/clinton_subject_model.json.bz2/download to C:\temp\sig\BleachBit-2.3-portable\BleachBit-Portable\clinton_subject_model.json.bz2
Traceback (most recent call last):
File "bleachbit\GuiChaff.pyo", line 160, in on_make_files
File "bleachbit\GuiChaff.pyo", line 140, in download_models_dialog
File "bleachbit\GuiChaff.pyo", line 130, in download_models_gui
File "bleachbit\Chaff.pyo", line 173, in download_models
File "bleachbit\Chaff.pyo", line 112, in download_url_to_fn
ImportError: No module named requests
|
ImportError
|
def delete_unnecessary():
logger.info("Deleting unnecessary files")
# Remove SVG to reduce space and avoid this error
# Error loading theme icon 'dialog-warning' for stock: Unable to load image-loading module: C:/Python27/Lib/site-packages/gtk-2.0/runtime/lib/gdk-pixbuf-2.0/2.10.0/loaders/libpixbufloader-svg.dll: `C:/Python27/Lib/site-packages/gtk-2.0/runtime/lib/gdk-pixbuf-2.0/2.10.0/loaders/libpixbufloader-svg.dll': The specified module could not be found.
# https://bugs.launchpad.net/bleachbit/+bug/1650907
delete_paths = [
r"_win32sysloader.pyd",
r"lib\gdk-pixbuf-2.0",
r"lib\gdbus-2.0",
r"perfmon.pyd",
r"servicemanager.pyd",
r"share\themes\default",
r"share\themes\emacs",
r"share\fontconfig",
r"share\icons\highcontrast",
r"share\themes",
r"win32evtlog.pyd",
r"win32pipe.pyd",
r"win32wnet.pyd",
]
for path in delete_paths:
path = r"dist\{}".format(path)
if not os.path.exists(path):
logger.warning("Path does not exist: " + path)
continue
if os.path.isdir(path):
this_dir_size = get_dir_size(path)
shutil.rmtree(path, ignore_errors=True)
logger.info(
"Deleting directory {} saved {:,} B".format(path, this_dir_size)
)
else:
logger.info(
"Deleting file {} saved {:,} B".format(path, os.path.getsize(path))
)
os.remove(path)
# by wildcard with recursive search
delete_wildcards = [
"*.a",
"*.def",
"*.lib",
"atk10.mo",
"gdk-pixbuf.mo",
"gettext-runtime.mo",
"glib20.mo",
"gtk20-properties.mo",
"libgsf.mo",
]
for wc in delete_wildcards:
total_size = 0
for f in recursive_glob("dist", [wc]):
total_size += os.path.getsize(f)
os.remove(f)
logger.info("Deleting wildcard {} saved {:,}B".format(wc, total_size))
|
def delete_unnecessary():
logger.info("Deleting unnecessary files")
# Remove SVG to reduce space and avoid this error
# Error loading theme icon 'dialog-warning' for stock: Unable to load image-loading module: C:/Python27/Lib/site-packages/gtk-2.0/runtime/lib/gdk-pixbuf-2.0/2.10.0/loaders/libpixbufloader-svg.dll: `C:/Python27/Lib/site-packages/gtk-2.0/runtime/lib/gdk-pixbuf-2.0/2.10.0/loaders/libpixbufloader-svg.dll': The specified module could not be found.
# https://bugs.launchpad.net/bleachbit/+bug/1650907
delete_paths = [
r"_win32sysloader.pyd",
r"lib\gdk-pixbuf-2.0",
r"lib\gdbus-2.0",
r"perfmon.pyd",
r"select.pyd",
r"servicemanager.pyd",
r"share\themes\default",
r"share\themes\emacs",
r"share\fontconfig",
r"share\icons\highcontrast",
r"share\themes",
r"win32evtlog.pyd",
r"win32pipe.pyd",
r"win32wnet.pyd",
]
for path in delete_paths:
path = r"dist\{}".format(path)
if not os.path.exists(path):
logger.warning("Path does not exist: " + path)
continue
if os.path.isdir(path):
this_dir_size = get_dir_size(path)
shutil.rmtree(path, ignore_errors=True)
logger.info(
"Deleting directory {} saved {:,} B".format(path, this_dir_size)
)
else:
logger.info(
"Deleting file {} saved {:,} B".format(path, os.path.getsize(path))
)
os.remove(path)
# by wildcard with recursive search
delete_wildcards = [
"*.a",
"*.def",
"*.lib",
"atk10.mo",
"gdk-pixbuf.mo",
"gettext-runtime.mo",
"glib20.mo",
"gtk20-properties.mo",
"libgsf.mo",
]
for wc in delete_wildcards:
total_size = 0
for f in recursive_glob("dist", [wc]):
total_size += os.path.getsize(f)
os.remove(f)
logger.info("Deleting wildcard {} saved {:,}B".format(wc, total_size))
|
https://github.com/bleachbit/bleachbit/issues/643
|
Downloading https://sourceforge.net/projects/bleachbit/files/chaff/clinton_subject_model.json.bz2/download to C:\temp\sig\BleachBit-2.3-portable\BleachBit-Portable\clinton_subject_model.json.bz2
Traceback (most recent call last):
File "bleachbit\GuiChaff.pyo", line 160, in on_make_files
File "bleachbit\GuiChaff.pyo", line 140, in download_models_dialog
File "bleachbit\GuiChaff.pyo", line 130, in download_models_gui
File "bleachbit\Chaff.pyo", line 173, in download_models
File "bleachbit\Chaff.pyo", line 112, in download_url_to_fn
ImportError: No module named requests
|
ImportError
|
def set_environ(varname, path):
"""Define an environment variable for use in CleanerML and Winapp2.ini"""
if not path:
return
if varname in os.environ:
# logger.debug('set_environ(%s, %s): skipping because environment variable is already defined', varname, path)
if "nt" == os.name:
os.environ[varname] = bleachbit.expandvars("%%%s%%" % varname).encode(
"utf-8"
)
# Do not redefine the environment variable when it already exists
# But re-encode them with utf-8 instead of mbcs
return
try:
if not os.path.exists(path):
raise RuntimeError(
"Variable %s points to a non-existent path %s" % (varname, path)
)
os.environ[varname] = path if isinstance(path, str) else path.encode("utf8")
except:
logger.exception(
"set_environ(%s, %s): exception when setting environment variable",
varname,
path,
)
|
def set_environ(varname, path):
"""Define an environment variable for use in CleanerML and Winapp2.ini"""
if not path:
return
if varname in os.environ:
# logger.debug('set_environ(%s, %s): skipping because environment variable is already defined', varname, path)
if "nt" == os.name:
os.environ[varname] = bleachbit.expandvars("%%%s%%" % varname).encode(
"utf-8"
)
# Do not redefine the environment variable when it already exists
# But re-encode them with utf-8 instead of mbcs
return
try:
if not os.path.exists(path):
raise RuntimeError(
"Variable %s points to a non-existent path %s" % (varname, path)
)
os.environ[varname] = path.encode("utf8")
except:
logger.exception(
"set_environ(%s, %s): exception when setting environment variable",
varname,
path,
)
|
https://github.com/bleachbit/bleachbit/issues/558
|
Traceback (most recent call last):
File "bleachbit\Windows.pyo", line 600, in set_environ
UnicodeDecodeError: 'utf8' codec can't decode byte 0xdc in position 26: invalid continuation byte
set_environ(cd, C:\Users\xxxxx-xxxxxx-x-xxÜö\AppData\Local\BleachBit): exception when setting environment variable
|
UnicodeDecodeError
|
def download_models(
content_model_path=DEFAULT_CONTENT_MODEL_PATH,
subject_model_path=DEFAULT_SUBJECT_MODEL_PATH,
twentysixhundred_model_path=DEFAULT_2600_MODEL_PATH,
on_error=None,
):
"""Download models
Calls on_error(primary_message, secondary_message) in case of error
Returns success as boolean value
"""
from urllib2 import urlopen, URLError, HTTPError
from httplib import HTTPException
import socket
for url, fn in (
(URL_CLINTON_SUBJECT, subject_model_path),
(URL_CLINTON_CONTENT, content_model_path),
(URL_2600, twentysixhundred_model_path),
):
if os.path.exists(fn):
logger.debug("File %s already exists", fn)
continue
logger.info("Downloading %s to %s", url, fn)
try:
resp = urlopen(url, cafile=CA_BUNDLE)
with open(fn, "wb") as f:
f.write(resp.read())
except (URLError, HTTPError, HTTPException, socket.error) as exc:
msg = _("Downloading url failed: %s") % url
msg2 = "{}: {}".format(type(exc).__name__, exc)
logger.exception(msg)
if on_error:
on_error(msg, msg2)
from bleachbit.FileUtilities import delete
delete(fn, ignore_missing=True) # delete any partial download
return False
return True
|
def download_models(
content_model_path=DEFAULT_CONTENT_MODEL_PATH,
subject_model_path=DEFAULT_SUBJECT_MODEL_PATH,
twentysixhundred_model_path=DEFAULT_2600_MODEL_PATH,
on_error=None,
):
"""Download models
Calls on_error(primary_message, secondary_message) in case of error
Returns success as boolean value
"""
from urllib2 import urlopen, URLError, HTTPError
from httplib import HTTPException
import socket
if HAVE_CERTIFI:
cafile = certifi.where()
else:
cafile = None
for url, fn in (
(URL_CLINTON_SUBJECT, subject_model_path),
(URL_CLINTON_CONTENT, content_model_path),
(URL_2600, twentysixhundred_model_path),
):
if os.path.exists(fn):
logger.debug("File %s already exists", fn)
continue
logger.info("Downloading %s to %s", url, fn)
try:
resp = urlopen(url, cafile=cafile)
with open(fn, "wb") as f:
f.write(resp.read())
except (URLError, HTTPError, HTTPException, socket.error) as exc:
msg = _("Downloading url failed: %s") % url
msg2 = "{}: {}".format(type(exc).__name__, exc)
logger.exception(msg)
if on_error:
on_error(msg, msg2)
from bleachbit.FileUtilities import delete
delete(fn, ignore_missing=True) # delete any partial download
return False
return True
|
https://github.com/bleachbit/bleachbit/issues/614
|
Automatically preserving language en.
Downloading https://sourceforge.net/projects/bleachbit/files/chaff/clinton_subject_model.json.bz2/download to B:\BleachBit-Portable\clinton_subject_model.json.bz2
Traceback (most recent call last):
File "bleachbit\GuiChaff.pyo", line 160, in on_make_files
File "bleachbit\GuiChaff.pyo", line 140, in download_models_dialog
File "bleachbit\GuiChaff.pyo", line 130, in download_models_gui
File "bleachbit\Chaff.pyo", line 141, in download_models
File "urllib2.pyo", line 144, in urlopen
File "ssl.pyo", line 440, in create_default_context
IOError: [Errno 2] No such file or directory
|
IOError
|
def run_setup():
setup(
name="bleachbit",
version=bleachbit.APP_VERSION,
description="Free space and maintain privacy",
long_description="BleachBit frees space and maintains privacy by quickly wiping files you don't need and didn't know you had. Supported applications include Firefox, Flash, Internet Explorer, Java, Opera, Safari, GNOME, and many others.",
author="Andrew Ziem",
author_email="andrew@bleachbit.org",
download_url="https://www.bleachbit.org/download",
license="GPLv3",
url=bleachbit.APP_URL,
platforms="Linux and Windows; Python v2.6 and 2.7; GTK v3.12+",
packages=["bleachbit", "bleachbit.markovify"],
**args,
)
|
def run_setup():
setup(
name="bleachbit",
version=bleachbit.APP_VERSION,
description="Free space and maintain privacy",
long_description="BleachBit frees space and maintains privacy by quickly wiping files you don't need and didn't know you had. Supported applications include Firefox, Flash, Internet Explorer, Java, Opera, Safari, GNOME, and many others.",
author="Andrew Ziem",
author_email="andrew@bleachbit.org",
download_url="https://www.bleachbit.org/download",
license="GPLv3",
url=bleachbit.APP_URL,
platforms="Linux and Windows; Python v2.6 and 2.7; GTK v3.12+",
packages=["bleachbit"],
**args,
)
|
https://github.com/bleachbit/bleachbit/issues/614
|
Automatically preserving language en.
Downloading https://sourceforge.net/projects/bleachbit/files/chaff/clinton_subject_model.json.bz2/download to B:\BleachBit-Portable\clinton_subject_model.json.bz2
Traceback (most recent call last):
File "bleachbit\GuiChaff.pyo", line 160, in on_make_files
File "bleachbit\GuiChaff.pyo", line 140, in download_models_dialog
File "bleachbit\GuiChaff.pyo", line 130, in download_models_gui
File "bleachbit\Chaff.pyo", line 141, in download_models
File "urllib2.pyo", line 144, in urlopen
File "ssl.pyo", line 440, in create_default_context
IOError: [Errno 2] No such file or directory
|
IOError
|
def build():
"""Build the application"""
logger.info("Deleting directories build and dist")
shutil.rmtree("build", ignore_errors=True)
shutil.rmtree("dist", ignore_errors=True)
shutil.rmtree("BleachBit-Portable", ignore_errors=True)
logger.info("Running py2exe")
shutil.copyfile("bleachbit.py", "bleachbit_console.py")
cmd = sys.executable + " -OO setup.py py2exe"
run_cmd(cmd)
assert_exist("dist\\bleachbit.exe")
assert_exist("dist\\bleachbit_console.exe")
os.remove("bleachbit_console.py")
if not os.path.exists("dist"):
os.makedirs("dist")
logger.info("Copying GTK files and icon")
copytree(GTK_DIR + "\\etc", "dist\\etc")
copytree(GTK_DIR + "\\lib", "dist\\lib")
for subpath in ["fontconfig", "fonts", "icons", "themes"]:
copytree(os.path.join(GTK_DIR, "share", subpath), "dist\\share\\" + subpath)
SCHEMAS_DIR = "share\\glib-2.0\\schemas"
os.makedirs(os.path.join("dist", SCHEMAS_DIR))
shutil.copyfile(
os.path.join(GTK_DIR, SCHEMAS_DIR, "gschemas.compiled"),
os.path.join("dist", SCHEMAS_DIR, "gschemas.compiled"),
)
shutil.copyfile("bleachbit.png", "dist\\share\\bleachbit.png")
for dll in glob.glob1(GTK_DIR, "*.dll"):
shutil.copyfile(os.path.join(GTK_DIR, dll), "dist\\" + dll)
os.mkdir("dist\\data")
shutil.copyfile("data\\app-menu.ui", "dist\\data\\app-menu.ui")
logger.info("Copying CA bundle")
shutil.copyfile(certifi.where(), os.path.join("dist", "cacert.pem"))
logger.info("Copying BleachBit localizations")
shutil.rmtree("dist\\share\\locale", ignore_errors=True)
copytree("locale", "dist\\share\\locale")
assert_exist("dist\\share\\locale\\es\\LC_MESSAGES\\bleachbit.mo")
logger.info("Copying BleachBit cleaners")
if not os.path.exists("dist\\share\\cleaners"):
os.makedirs("dist\\share\\cleaners")
cleaners_files = recursive_glob("cleaners", ["*.xml"])
for file in cleaners_files:
shutil.copy(file, "dist\\share\\cleaners")
logger.info("Checking for CleanerML")
assert_exist("dist\\share\\cleaners\\internet_explorer.xml")
logger.info("Copying license")
shutil.copy("COPYING", "dist")
sign_code("dist\\bleachbit.exe")
sign_code("dist\\bleachbit_console.exe")
assert_execute_console()
|
def build():
"""Build the application"""
logger.info("Deleting directories build and dist")
shutil.rmtree("build", ignore_errors=True)
shutil.rmtree("dist", ignore_errors=True)
shutil.rmtree("BleachBit-Portable", ignore_errors=True)
logger.info("Running py2exe")
shutil.copyfile("bleachbit.py", "bleachbit_console.py")
cmd = sys.executable + " -OO setup.py py2exe"
run_cmd(cmd)
assert_exist("dist\\bleachbit.exe")
assert_exist("dist\\bleachbit_console.exe")
os.remove("bleachbit_console.py")
if not os.path.exists("dist"):
os.makedirs("dist")
logger.info("Copying GTK files and icon")
copytree(GTK_DIR + "\\etc", "dist\\etc")
copytree(GTK_DIR + "\\lib", "dist\\lib")
for subpath in ["glib-2.0", "fontconfig", "fonts", "icons", "themes"]:
copytree(os.path.join(GTK_DIR, "share", subpath), "dist\\share\\" + subpath)
shutil.copyfile("bleachbit.png", "dist\\share\\bleachbit.png")
for dll in glob.glob1(GTK_DIR, "*.dll"):
shutil.copyfile(os.path.join(GTK_DIR, dll), "dist\\" + dll)
os.mkdir("dist\\data")
shutil.copyfile("data\\app-menu.ui", "dist\\data\\app-menu.ui")
logger.info("Copying BleachBit localizations")
shutil.rmtree("dist\\share\\locale", ignore_errors=True)
copytree("locale", "dist\\share\\locale")
assert_exist("dist\\share\\locale\\es\\LC_MESSAGES\\bleachbit.mo")
logger.info("Copying BleachBit cleaners")
if not os.path.exists("dist\\share\\cleaners"):
os.makedirs("dist\\share\\cleaners")
cleaners_files = recursive_glob("cleaners", ["*.xml"])
for file in cleaners_files:
shutil.copy(file, "dist\\share\\cleaners")
logger.info("Checking for CleanerML")
assert_exist("dist\\share\\cleaners\\internet_explorer.xml")
logger.info("Copying license")
shutil.copy("COPYING", "dist")
sign_code("dist\\bleachbit.exe")
sign_code("dist\\bleachbit_console.exe")
assert_execute_console()
|
https://github.com/bleachbit/bleachbit/issues/614
|
Automatically preserving language en.
Downloading https://sourceforge.net/projects/bleachbit/files/chaff/clinton_subject_model.json.bz2/download to B:\BleachBit-Portable\clinton_subject_model.json.bz2
Traceback (most recent call last):
File "bleachbit\GuiChaff.pyo", line 160, in on_make_files
File "bleachbit\GuiChaff.pyo", line 140, in download_models_dialog
File "bleachbit\GuiChaff.pyo", line 130, in download_models_gui
File "bleachbit\Chaff.pyo", line 141, in download_models
File "urllib2.pyo", line 144, in urlopen
File "ssl.pyo", line 440, in create_default_context
IOError: [Errno 2] No such file or directory
|
IOError
|
def delete_unnecessary():
logger.info("Deleting unnecessary files")
# Remove SVG to reduce space and avoid this error
# Error loading theme icon 'dialog-warning' for stock: Unable to load image-loading module: C:/Python27/Lib/site-packages/gtk-2.0/runtime/lib/gdk-pixbuf-2.0/2.10.0/loaders/libpixbufloader-svg.dll: `C:/Python27/Lib/site-packages/gtk-2.0/runtime/lib/gdk-pixbuf-2.0/2.10.0/loaders/libpixbufloader-svg.dll': The specified module could not be found.
# https://bugs.launchpad.net/bleachbit/+bug/1650907
delete_paths = [
r"_win32sysloader.pyd",
r"lib\gdk-pixbuf-2.0",
r"lib\gdbus-2.0",
r"perfmon.pyd",
r"select.pyd",
r"servicemanager.pyd",
r"share\themes\default",
r"share\themes\emacs",
r"share\fontconfig",
r"share\icons\highcontrast",
r"share\themes",
r"win32evtlog.pyd",
r"win32pipe.pyd",
r"win32wnet.pyd",
]
for path in delete_paths:
path = r"dist\{}".format(path)
if not os.path.exists(path):
logger.warning("Path does not exist: " + path)
continue
if os.path.isdir(path):
this_dir_size = get_dir_size(path)
shutil.rmtree(path, ignore_errors=True)
logger.info(
"Deleting directory {} saved {:,} B".format(path, this_dir_size)
)
else:
logger.info(
"Deleting file {} saved {:,} B".format(path, os.path.getsize(path))
)
os.remove(path)
# by wildcard with recursive search
delete_wildcards = [
"*.a",
"*.def",
"*.lib",
"atk10.mo",
"gdk-pixbuf.mo",
"gettext-runtime.mo",
"glib20.mo",
"gtk20-properties.mo",
"libgsf.mo",
]
for wc in delete_wildcards:
total_size = 0
for f in recursive_glob("dist", [wc]):
total_size += os.path.getsize(f)
os.remove(f)
logger.info("Deleting wildcard {} saved {:,}B".format(wc, total_size))
|
def delete_unnecessary():
logger.info("Deleting unnecessary files")
# Remove SVG to reduce space and avoid this error
# Error loading theme icon 'dialog-warning' for stock: Unable to load image-loading module: C:/Python27/Lib/site-packages/gtk-2.0/runtime/lib/gdk-pixbuf-2.0/2.10.0/loaders/libpixbufloader-svg.dll: `C:/Python27/Lib/site-packages/gtk-2.0/runtime/lib/gdk-pixbuf-2.0/2.10.0/loaders/libpixbufloader-svg.dll': The specified module could not be found.
# https://bugs.launchpad.net/bleachbit/+bug/1650907
delete_paths = [
r"_win32sysloader.pyd",
r"lib\gdk-pixbuf-2.0",
r"lib\gdbus-2.0",
r"perfmon.pyd",
r"select.pyd",
r"servicemanager.pyd",
r"share\themes\default",
r"share\themes\emacs",
r"share\fontconfig",
r"share\glib-2.0",
r"share\icons\highcontrast",
r"share\themes",
r"win32evtlog.pyd",
r"win32pipe.pyd",
r"win32wnet.pyd",
]
for path in delete_paths:
path = r"dist\{}".format(path)
if not os.path.exists(path):
logger.warning("Path does not exist: " + path)
continue
if os.path.isdir(path):
this_dir_size = get_dir_size(path)
shutil.rmtree(path, ignore_errors=True)
logger.info(
"Deleting directory {} saved {:,} B".format(path, this_dir_size)
)
else:
logger.info(
"Deleting file {} saved {:,} B".format(path, os.path.getsize(path))
)
os.remove(path)
# by wildcard with recursive search
delete_wildcards = [
"*.a",
"*.def",
"*.lib",
"atk10.mo",
"gdk-pixbuf.mo",
"gettext-runtime.mo",
"glib20.mo",
"gtk20-properties.mo",
"libgsf.mo",
]
for wc in delete_wildcards:
total_size = 0
for f in recursive_glob("dist", [wc]):
total_size += os.path.getsize(f)
os.remove(f)
logger.info("Deleting wildcard {} saved {:,}B".format(wc, total_size))
|
https://github.com/bleachbit/bleachbit/issues/614
|
Automatically preserving language en.
Downloading https://sourceforge.net/projects/bleachbit/files/chaff/clinton_subject_model.json.bz2/download to B:\BleachBit-Portable\clinton_subject_model.json.bz2
Traceback (most recent call last):
File "bleachbit\GuiChaff.pyo", line 160, in on_make_files
File "bleachbit\GuiChaff.pyo", line 140, in download_models_dialog
File "bleachbit\GuiChaff.pyo", line 130, in download_models_gui
File "bleachbit\Chaff.pyo", line 141, in download_models
File "urllib2.pyo", line 144, in urlopen
File "ssl.pyo", line 440, in create_default_context
IOError: [Errno 2] No such file or directory
|
IOError
|
def __init__(self, auto_exit, *args, **kwargs):
super(GUI, self).__init__(*args, **kwargs)
self.auto_exit = auto_exit
self.set_wmclass(APP_NAME, APP_NAME)
self.populate_window()
# Redirect logging to the GUI.
bb_logger = logging.getLogger("bleachbit")
from bleachbit.Log import GtkLoggerHandler
self.gtklog = GtkLoggerHandler(self.append_text)
bb_logger.addHandler(self.gtklog)
# process any delayed logs
from bleachbit.Log import DelayLog
if isinstance(sys.stderr, DelayLog):
for msg in sys.stderr.read():
self.append_text(msg)
# if stderr was redirected - keep redirecting it
sys.stderr = self.gtklog
Gtk.Settings.get_default().set_property(
"gtk-application-prefer-dark-theme", options.get("dark_mode")
)
if options.is_corrupt():
logger.error(
_("Resetting the configuration file because it is corrupt: %s")
% bleachbit.options_file
)
bleachbit.Options.init_configuration()
if options.get("first_start") and not auto_exit:
if os.name == "posix":
self.append_text(
_(
"Access the application menu by clicking the hamburger icon on the title bar."
)
)
pref = PreferencesDialog(self, self.cb_refresh_operations)
pref.run()
if os.name == "nt":
self.append_text(
_("Access the application menu by clicking the logo on the title bar.")
)
options.set("first_start", False)
if os.name == "nt":
# BitDefender false positive. BitDefender didn't mark BleachBit as infected or show
# anything in its log, but sqlite would fail to import unless BitDefender was in "game mode."
# http://bleachbit.sourceforge.net/forum/074-fails-errors
try:
import sqlite3
except ImportError as e:
self.append_text(
_(
"Error loading the SQLite module: the antivirus software may be blocking it."
),
"error",
)
if os.name == "posix" and bleachbit.expanduser("~") == "/root":
self.append_text(
_(
"You are running BleachBit with administrative privileges for cleaning shared parts of the system, and references to the user profile folder will clean only the root account."
)
)
if os.name == "nt" and options.get("shred"):
from win32com.shell.shell import IsUserAnAdmin
if not IsUserAnAdmin():
self.append_text(
_(
"Run BleachBit with administrator privileges to improve the accuracy of overwriting the contents of files."
)
)
self.append_text("\n")
GLib.idle_add(self.cb_refresh_operations)
|
def __init__(self, auto_exit, *args, **kwargs):
super(GUI, self).__init__(*args, **kwargs)
self.auto_exit = auto_exit
self.set_wmclass(APP_NAME, APP_NAME)
self.populate_window()
# Redirect logging to the GUI.
bb_logger = logging.getLogger("bleachbit")
from bleachbit.Log import GtkLoggerHandler
self.gtklog = GtkLoggerHandler(self.append_text)
bb_logger.addHandler(self.gtklog)
# process any delayed logs
from bleachbit.Log import DelayLog
if isinstance(sys.stderr, DelayLog):
for msg in sys.stderr.read():
self.append_text(msg)
Gtk.Settings.get_default().set_property(
"gtk-application-prefer-dark-theme", options.get("dark_mode")
)
if options.is_corrupt():
logger.error(
_("Resetting the configuration file because it is corrupt: %s")
% bleachbit.options_file
)
bleachbit.Options.init_configuration()
if options.get("first_start") and not auto_exit:
if os.name == "posix":
self.append_text(
_(
"Access the application menu by clicking the hamburger icon on the title bar."
)
)
pref = PreferencesDialog(self, self.cb_refresh_operations)
pref.run()
if os.name == "nt":
self.append_text(
_("Access the application menu by clicking the logo on the title bar.")
)
options.set("first_start", False)
if os.name == "nt":
# BitDefender false positive. BitDefender didn't mark BleachBit as infected or show
# anything in its log, but sqlite would fail to import unless BitDefender was in "game mode."
# http://bleachbit.sourceforge.net/forum/074-fails-errors
try:
import sqlite3
except ImportError as e:
self.append_text(
_(
"Error loading the SQLite module: the antivirus software may be blocking it."
),
"error",
)
if os.name == "posix" and bleachbit.expanduser("~") == "/root":
self.append_text(
_(
"You are running BleachBit with administrative privileges for cleaning shared parts of the system, and references to the user profile folder will clean only the root account."
)
)
if os.name == "nt" and options.get("shred"):
from win32com.shell.shell import IsUserAnAdmin
if not IsUserAnAdmin():
self.append_text(
_(
"Run BleachBit with administrator privileges to improve the accuracy of overwriting the contents of files."
)
)
self.append_text("\n")
GLib.idle_add(self.cb_refresh_operations)
|
https://github.com/bleachbit/bleachbit/issues/609
|
C:\Program Files (x86)\BleachBit>bleachbit_console.exe
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 125, in cb_make_chaff
File "bleachbit\GuiChaff.pyo", line 30, in <module>
File "bleachbit\Chaff.pyo", line 39, in <module>
ImportError: No module named markovify
|
ImportError
|
def __init__(self):
self.queue = []
self.msg = ""
|
def __init__(self):
self.queue = []
|
https://github.com/bleachbit/bleachbit/issues/609
|
C:\Program Files (x86)\BleachBit>bleachbit_console.exe
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 125, in cb_make_chaff
File "bleachbit\GuiChaff.pyo", line 30, in <module>
File "bleachbit\Chaff.pyo", line 39, in <module>
ImportError: No module named markovify
|
ImportError
|
def read(self):
for msg in self.queue:
yield msg
queue = []
|
def read(self):
for msg in self.queue:
yield msg + "\n"
queue = []
|
https://github.com/bleachbit/bleachbit/issues/609
|
C:\Program Files (x86)\BleachBit>bleachbit_console.exe
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 125, in cb_make_chaff
File "bleachbit\GuiChaff.pyo", line 30, in <module>
File "bleachbit\Chaff.pyo", line 39, in <module>
ImportError: No module named markovify
|
ImportError
|
def write(self, msg):
self.msg += msg
if self.msg[-1] == "\n":
self.queue.append(self.msg)
self.msg = ""
|
def write(self, msg):
self.queue.append(msg)
|
https://github.com/bleachbit/bleachbit/issues/609
|
C:\Program Files (x86)\BleachBit>bleachbit_console.exe
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 125, in cb_make_chaff
File "bleachbit\GuiChaff.pyo", line 30, in <module>
File "bleachbit\Chaff.pyo", line 39, in <module>
ImportError: No module named markovify
|
ImportError
|
def __init__(self, append_text):
logging.Handler.__init__(self)
self.append_text = append_text
self.msg = ""
self.update_log_level()
|
def __init__(self, append_text):
logging.Handler.__init__(self)
self.append_text = append_text
self.update_log_level()
|
https://github.com/bleachbit/bleachbit/issues/609
|
C:\Program Files (x86)\BleachBit>bleachbit_console.exe
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 125, in cb_make_chaff
File "bleachbit\GuiChaff.pyo", line 30, in <module>
File "bleachbit\Chaff.pyo", line 39, in <module>
ImportError: No module named markovify
|
ImportError
|
def cb_shred_quit(self, action, param):
"""Shred settings (for privacy reasons) and quit"""
# build a list of paths to delete
paths = []
if "nt" == os.name and portable_mode:
# in portable mode on Windows, the options directory includes
# executables
paths.append(bleachbit.options_file)
else:
paths.append(bleachbit.options_dir)
# prompt the user to confirm
if not GUI.shred_paths(self._window, paths):
logger.debug("user aborted shred")
# aborted
return
# in portable mode, rebuild a minimal bleachbit.ini
if "nt" == os.name and portable_mode:
with open(bleachbit.options_file, "w") as f:
f.write("[Portable]\n")
# Quit the application through the idle loop to allow the worker
# to delete the files. Use the lowest priority because the worker
# uses the standard priority. Otherwise, this will quit before
# the files are deleted.
GLib.idle_add(lambda: self.quit(), priority=GObject.PRIORITY_LOW)
|
def cb_shred_quit(self, action, param):
"""Shred settings (for privacy reasons) and quit"""
# build a list of paths to delete
paths = []
if "nt" == os.name and portable_mode:
# in portable mode on Windows, the options directory includes
# executables
paths.append(bleachbit.options_file)
else:
paths.append(bleachbit.options_dir)
# prompt the user to confirm
if not self.shred_paths(paths):
logger.debug("user aborted shred")
# aborted
return
# in portable mode, rebuild a minimal bleachbit.ini
if "nt" == os.name and portable_mode:
with open(bleachbit.options_file, "w") as f:
f.write("[Portable]\n")
# Quit the application through the idle loop to allow the worker
# to delete the files. Use the lowest priority because the worker
# uses the standard priority. Otherwise, this will quit before
# the files are deleted.
GLib.idle_add(lambda: Gtk.main_quit(), priority=GObject.PRIORITY_LOW)
|
https://github.com/bleachbit/bleachbit/issues/552
|
Traceback (most recent call last):
File "bleachbit/bleachbit/GUI.py", line 177, in cb_shred_quit
if not self.shred_paths(paths):
AttributeError: 'Bleachbit' object has no attribute 'shred_paths'
|
AttributeError
|
def __init__(self, uac=True, shred_paths=None, exit=False):
if uac and "nt" == os.name and Windows.elevate_privileges():
# privileges escalated in other process
sys.exit(0)
Gtk.Application.__init__(
self,
application_id="org.gnome.Bleachbit",
flags=Gio.ApplicationFlags.FLAGS_NONE,
)
if not exit:
from bleachbit import RecognizeCleanerML
RecognizeCleanerML.RecognizeCleanerML()
register_cleaners()
GObject.threads_init()
if shred_paths:
self._shred_paths = shred_paths
return
if "nt" == os.name:
# BitDefender false positive. BitDefender didn't mark BleachBit as infected or show
# anything in its log, but sqlite would fail to import unless BitDefender was in "game mode."
# https://www.bleachbit.org/forum/074-fails-errors
try:
import sqlite3
except ImportError:
logger.exception(
_(
"Error loading the SQLite module: the antivirus software may be blocking it."
)
)
if exit:
# This is used for automated testing of whether the GUI can start.
print("Success")
GObject.idle_add(lambda: self.quit(), priority=GObject.PRIORITY_LOW)
|
def __init__(self, uac=True, shred_paths=None, exit=False):
if uac and "nt" == os.name and Windows.elevate_privileges():
# privileges escalated in other process
sys.exit(0)
Gtk.Application.__init__(
self,
application_id="org.gnome.Bleachbit",
flags=Gio.ApplicationFlags.FLAGS_NONE,
)
if not exit:
from bleachbit import RecognizeCleanerML
RecognizeCleanerML.RecognizeCleanerML()
register_cleaners()
GObject.threads_init()
if shred_paths:
self._shred_paths = shred_paths
return
if "nt" == os.name:
# BitDefender false positive. BitDefender didn't mark BleachBit as infected or show
# anything in its log, but sqlite would fail to import unless BitDefender was in "game mode."
# https://www.bleachbit.org/forum/074-fails-errors
try:
import sqlite3
except ImportError:
logger.exception(
_(
"Error loading the SQLite module: the antivirus software may be blocking it."
)
)
if "posix" == os.name and bleachbit.expanduser("~") == "/root":
self.append_text(
_(
"You are running BleachBit with administrative privileges for cleaning shared parts of the system, and references to the user profile folder will clean only the root account."
)
)
if "nt" == os.name and options.get("shred"):
from win32com.shell.shell import IsUserAnAdmin
if not IsUserAnAdmin():
self.append_text(
_(
"Run BleachBit with administrator privileges to improve the accuracy of overwriting the contents of files."
)
)
self.append_text("\n")
if exit:
# This is used for automated testing of whether the GUI can start.
print("Success")
GObject.idle_add(lambda: self.quit(), priority=GObject.PRIORITY_LOW)
|
https://github.com/bleachbit/bleachbit/issues/554
|
Traceback (most recent call last):
File "\bleachbit.py", line 39, in <module>
app = bleachbit.GUI.Bleachbit()
File "\bleachbit\GUI.py", line 87, in __init__
self.append_text(
AttributeError: 'Bleachbit' object has no attribute 'append_text'
|
AttributeError
|
def __init__(self, *args, **kwargs):
super(GUI, self).__init__(*args, **kwargs)
from bleachbit import RecognizeCleanerML
RecognizeCleanerML.RecognizeCleanerML()
register_cleaners()
self.set_wmclass(APP_NAME, APP_NAME)
self.populate_window()
# Redirect logging to the GUI.
bb_logger = logging.getLogger("bleachbit")
gtklog = GtkLoggerHandler(self.append_text)
bb_logger.addHandler(gtklog)
if "nt" == os.name and "windows_exe" == getattr(sys, "frozen", None):
# On Microsoft Windows this avoids py2exe redirecting stderr to
# bleachbit.exe.log.
# sys.frozen = console_exe means the console is shown
from bleachbit import logger_sh
bb_logger.removeHandler(logger_sh)
Gtk.Settings.get_default().set_property(
"gtk-application-prefer-dark-theme", options.get("dark_mode")
)
if options.get("first_start") and "posix" == os.name:
pref = PreferencesDialog(self, self.cb_refresh_operations)
pref.run()
options.set("first_start", False)
if bleachbit.online_update_notification_enabled and options.get(
"check_online_updates"
):
self.check_online_updates()
if "nt" == os.name:
# BitDefender false positive. BitDefender didn't mark BleachBit as infected or show
# anything in its log, but sqlite would fail to import unless BitDefender was in "game mode."
# http://bleachbit.sourceforge.net/forum/074-fails-errors
try:
import sqlite3
except ImportError as e:
self.append_text(
_(
"Error loading the SQLite module: the antivirus software may be blocking it."
),
"error",
)
if "posix" == os.name and bleachbit.expanduser("~") == "/root":
self.append_text(
_(
"You are running BleachBit with administrative privileges for cleaning shared parts of the system, and references to the user profile folder will clean only the root account."
)
)
if "nt" == os.name and options.get("shred"):
from win32com.shell.shell import IsUserAnAdmin
if not IsUserAnAdmin():
self.append_text(
_(
"Run BleachBit with administrator privileges to improve the accuracy of overwriting the contents of files."
)
)
self.append_text("\n")
|
def __init__(self, *args, **kwargs):
super(GUI, self).__init__(*args, **kwargs)
from bleachbit import RecognizeCleanerML
RecognizeCleanerML.RecognizeCleanerML()
register_cleaners()
self.set_wmclass(APP_NAME, APP_NAME)
self.populate_window()
# Redirect logging to the GUI.
bb_logger = logging.getLogger("bleachbit")
gtklog = GtkLoggerHandler(self.append_text)
bb_logger.addHandler(gtklog)
if "nt" == os.name and "windows_exe" == getattr(sys, "frozen", None):
# On Microsoft Windows this avoids py2exe redirecting stderr to
# bleachbit.exe.log.
# sys.frozen = console_exe means the console is shown
from bleachbit import logger_sh
bb_logger.removeHandler(logger_sh)
Gtk.Settings.get_default().set_property(
"gtk-application-prefer-dark-theme", options.get("dark_mode")
)
if options.get("first_start") and "posix" == os.name:
pref = PreferencesDialog(self, self.cb_refresh_operations)
pref.run()
options.set("first_start", False)
if bleachbit.online_update_notification_enabled and options.get(
"check_online_updates"
):
self.check_online_updates()
if "nt" == os.name:
# BitDefender false positive. BitDefender didn't mark BleachBit as infected or show
# anything in its log, but sqlite would fail to import unless BitDefender was in "game mode."
# http://bleachbit.sourceforge.net/forum/074-fails-errors
try:
import sqlite3
except ImportError as e:
self.append_text(
_(
"Error loading the SQLite module: the antivirus software may be blocking it."
),
"error",
)
|
https://github.com/bleachbit/bleachbit/issues/554
|
Traceback (most recent call last):
File "\bleachbit.py", line 39, in <module>
app = bleachbit.GUI.Bleachbit()
File "\bleachbit\GUI.py", line 87, in __init__
self.append_text(
AttributeError: 'Bleachbit' object has no attribute 'append_text'
|
AttributeError
|
def build_app_menu(self):
builder = Gtk.Builder()
builder.add_from_file(
os.path.join(bleachbit.bleachbit_exe_path, "data", "app-menu.ui")
)
menu = builder.get_object("app-menu")
self.set_app_menu(menu)
# set up mappings between <attribute name="action"> in app-menu.ui and methods in this class
actions = {
"shredFiles": self.cb_shred_file,
"shredFolders": self.cb_shred_folder,
"shredClipboard": self.cb_shred_clipboard,
"wipeFreeSpace": self.cb_wipe_free_space,
"shredQuit": self.cb_shred_quit,
"preferences": self.cb_preferences_dialog,
"diagnostics": self.diagnostic_dialog,
"about": self.about,
"quit": self.quit,
}
for actionName, callback in actions.items():
action = Gio.SimpleAction.new(actionName, None)
action.connect("activate", callback)
self.add_action(action)
# help needs more parameters and needs to be declared separately
helpAction = Gio.SimpleAction.new("help", None)
helpAction.connect(
"activate", GuiBasic.open_url, bleachbit.help_contents_url, self._window
)
self.add_action(helpAction)
|
def build_app_menu(self):
builder = Gtk.Builder()
builder.add_from_file(
os.path.join(bleachbit.bleachbit_exe_path, "data", "app-menu.ui")
)
menu = builder.get_object("app-menu")
self.set_app_menu(menu)
# set up mappings between <attribute name="action"> in app-menu.ui and methods in this class
actions = {
"shredFiles": self.cb_shred_file,
"shredFolders": self.cb_shred_folder,
"wipeFreeSpace": self.cb_wipe_free_space,
"shredQuit": self.cb_shred_quit,
"preferences": self.cb_preferences_dialog,
"diagnostics": self.diagnostic_dialog,
"about": self.about,
"quit": self.quit,
}
for actionName, callback in actions.items():
action = Gio.SimpleAction.new(actionName, None)
action.connect("activate", callback)
self.add_action(action)
# help needs more parameters and needs to be declared separately
helpAction = Gio.SimpleAction.new("help", None)
helpAction.connect(
"activate", GuiBasic.open_url, bleachbit.help_contents_url, self._window
)
self.add_action(helpAction)
|
https://github.com/bleachbit/bleachbit/issues/545
|
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 124, in cb_shred_file
File "bleachbit\GuiBasic.pyo", line 94, in browse_files
AttributeError: 'GUI' object has no attribute 'window'
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 135, in cb_shred_folder
File "bleachbit\GuiBasic.pyo", line 43, in browse_folder
AttributeError: 'GUI' object has no attribute 'window'
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 173, in cb_wipe_free_space
File "bleachbit\GuiBasic.pyo", line 43, in browse_folder
AttributeError: 'GUI' object has no attribute 'window'
```
|
AttributeError
|
def setup_drag_n_drop(self):
def cb_drag_data_received(widget, context, x, y, data, info, time):
if info == 80:
uris = data.get_uris()
paths = FileUtilities.uris_to_paths(uris)
self.shred_paths(paths)
def setup_widget(widget):
widget.drag_dest_set(
Gtk.DestDefaults.MOTION
| Gtk.DestDefaults.HIGHLIGHT
| Gtk.DestDefaults.DROP,
[Gtk.TargetEntry.new("text/uri-list", 0, 80)],
Gdk.DragAction.COPY,
)
widget.connect("drag_data_received", cb_drag_data_received)
setup_widget(self)
setup_widget(self.textview)
self.textview.connect("drag_motion", lambda widget, context, x, y, time: True)
|
def setup_drag_n_drop(self):
def cb_drag_data_received(widget, context, x, y, data, info, time):
if info == 80:
uris = data.get_uris()
paths = FileUtilities.uris_to_paths(uris)
self.shred_paths(paths)
self.drag_dest_set(
Gtk.DestDefaults.MOTION | Gtk.DestDefaults.HIGHLIGHT | Gtk.DestDefaults.DROP,
[Gtk.TargetEntry.new("text/uri-list", 0, 80)],
Gdk.DragAction.COPY,
)
self.connect("drag_data_received", cb_drag_data_received)
|
https://github.com/bleachbit/bleachbit/issues/545
|
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 124, in cb_shred_file
File "bleachbit\GuiBasic.pyo", line 94, in browse_files
AttributeError: 'GUI' object has no attribute 'window'
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 135, in cb_shred_folder
File "bleachbit\GuiBasic.pyo", line 43, in browse_folder
AttributeError: 'GUI' object has no attribute 'window'
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 173, in cb_wipe_free_space
File "bleachbit\GuiBasic.pyo", line 43, in browse_folder
AttributeError: 'GUI' object has no attribute 'window'
```
|
AttributeError
|
def browse_folder(parent, title, multiple, stock_button):
"""Ask the user to select a folder. Return the full path or None."""
if "nt" == os.name and None == os.getenv("BB_NATIVE"):
ret = Windows.browse_folder(parent, title)
return [ret] if multiple and not ret is None else ret
# fall back to GTK+
chooser = Gtk.FileChooserDialog(
transient_for=parent, title=title, action=Gtk.FileChooserAction.SELECT_FOLDER
)
chooser.add_buttons(
_("_Cancel"), Gtk.ResponseType.CANCEL, stock_button, Gtk.ResponseType.OK
)
chooser.set_default_response(Gtk.ResponseType.OK)
chooser.set_select_multiple(multiple)
chooser.set_current_folder(expanduser("~"))
resp = chooser.run()
if multiple:
ret = chooser.get_filenames()
else:
ret = chooser.get_filename()
chooser.hide()
chooser.destroy()
if Gtk.ResponseType.OK != resp:
# user cancelled
return None
return ret
|
def browse_folder(parent, title, multiple, stock_button):
"""Ask the user to select a folder. Return the full path or None."""
if "nt" == os.name and None == os.getenv("BB_NATIVE"):
ret = Windows.browse_folder(parent.window.handle if parent else None, title)
return [ret] if multiple and not ret is None else ret
# fall back to GTK+
chooser = Gtk.FileChooserDialog(
transient_for=parent, title=title, action=Gtk.FileChooserAction.SELECT_FOLDER
)
chooser.add_buttons(
_("_Cancel"), Gtk.ResponseType.CANCEL, stock_button, Gtk.ResponseType.OK
)
chooser.set_default_response(Gtk.ResponseType.OK)
chooser.set_select_multiple(multiple)
chooser.set_current_folder(expanduser("~"))
resp = chooser.run()
if multiple:
ret = chooser.get_filenames()
else:
ret = chooser.get_filename()
chooser.hide()
chooser.destroy()
if Gtk.ResponseType.OK != resp:
# user cancelled
return None
return ret
|
https://github.com/bleachbit/bleachbit/issues/545
|
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 124, in cb_shred_file
File "bleachbit\GuiBasic.pyo", line 94, in browse_files
AttributeError: 'GUI' object has no attribute 'window'
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 135, in cb_shred_folder
File "bleachbit\GuiBasic.pyo", line 43, in browse_folder
AttributeError: 'GUI' object has no attribute 'window'
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 173, in cb_wipe_free_space
File "bleachbit\GuiBasic.pyo", line 43, in browse_folder
AttributeError: 'GUI' object has no attribute 'window'
```
|
AttributeError
|
def browse_file(parent, title):
"""Prompt user to select a single file"""
if "nt" == os.name and None == os.getenv("BB_NATIVE"):
return Windows.browse_file(parent, title)
chooser = Gtk.FileChooserDialog(
title=title, transient_for=parent, action=Gtk.FileChooserAction.OPEN
)
chooser.add_buttons(
_("_Cancel"), Gtk.ResponseType.CANCEL, _("_Open"), Gtk.ResponseType.OK
)
chooser.set_default_response(Gtk.ResponseType.OK)
chooser.set_current_folder(expanduser("~"))
resp = chooser.run()
path = chooser.get_filename()
chooser.destroy()
if Gtk.ResponseType.OK != resp:
# user cancelled
return None
return path
|
def browse_file(parent, title):
"""Prompt user to select a single file"""
if "nt" == os.name and None == os.getenv("BB_NATIVE"):
return Windows.browse_file(parent.window.handle, title)
chooser = Gtk.FileChooserDialog(
title=title, transient_for=parent, action=Gtk.FileChooserAction.OPEN
)
chooser.add_buttons(
_("_Cancel"), Gtk.ResponseType.CANCEL, _("_Open"), Gtk.ResponseType.OK
)
chooser.set_default_response(Gtk.ResponseType.OK)
chooser.set_current_folder(expanduser("~"))
resp = chooser.run()
path = chooser.get_filename()
chooser.destroy()
if Gtk.ResponseType.OK != resp:
# user cancelled
return None
return path
|
https://github.com/bleachbit/bleachbit/issues/545
|
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 124, in cb_shred_file
File "bleachbit\GuiBasic.pyo", line 94, in browse_files
AttributeError: 'GUI' object has no attribute 'window'
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 135, in cb_shred_folder
File "bleachbit\GuiBasic.pyo", line 43, in browse_folder
AttributeError: 'GUI' object has no attribute 'window'
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 173, in cb_wipe_free_space
File "bleachbit\GuiBasic.pyo", line 43, in browse_folder
AttributeError: 'GUI' object has no attribute 'window'
```
|
AttributeError
|
def browse_files(parent, title):
"""Prompt user to select multiple files to delete"""
if "nt" == os.name and None == os.getenv("BB_NATIVE"):
return Windows.browse_files(parent, title)
chooser = Gtk.FileChooserDialog(
title=title, transient_for=parent, action=Gtk.FileChooserAction.OPEN
)
chooser.add_buttons(
_("_Cancel"), Gtk.ResponseType.CANCEL, _("_Delete"), Gtk.ResponseType.OK
)
chooser.set_default_response(Gtk.ResponseType.OK)
chooser.set_select_multiple(True)
chooser.set_current_folder(expanduser("~"))
resp = chooser.run()
paths = chooser.get_filenames()
chooser.destroy()
if Gtk.ResponseType.OK != resp:
# user cancelled
return None
return paths
|
def browse_files(parent, title):
"""Prompt user to select multiple files to delete"""
if "nt" == os.name and None == os.getenv("BB_NATIVE"):
return Windows.browse_files(parent.window.handle, title)
chooser = Gtk.FileChooserDialog(
title=title, transient_for=parent, action=Gtk.FileChooserAction.OPEN
)
chooser.add_buttons(
_("_Cancel"), Gtk.ResponseType.CANCEL, _("_Delete"), Gtk.ResponseType.OK
)
chooser.set_default_response(Gtk.ResponseType.OK)
chooser.set_select_multiple(True)
chooser.set_current_folder(expanduser("~"))
resp = chooser.run()
paths = chooser.get_filenames()
chooser.destroy()
if Gtk.ResponseType.OK != resp:
# user cancelled
return None
return paths
|
https://github.com/bleachbit/bleachbit/issues/545
|
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 124, in cb_shred_file
File "bleachbit\GuiBasic.pyo", line 94, in browse_files
AttributeError: 'GUI' object has no attribute 'window'
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 135, in cb_shred_folder
File "bleachbit\GuiBasic.pyo", line 43, in browse_folder
AttributeError: 'GUI' object has no attribute 'window'
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 173, in cb_wipe_free_space
File "bleachbit\GuiBasic.pyo", line 43, in browse_folder
AttributeError: 'GUI' object has no attribute 'window'
```
|
AttributeError
|
def browse_folder(_, title):
"""Ask the user to select a folder. Return full path."""
pidl = shell.SHBrowseForFolder(None, None, title)[0]
if pidl is None:
# user cancelled
return None
fullpath = shell.SHGetPathFromIDList(pidl)
return fullpath
|
def browse_folder(hwnd, title):
"""Ask the user to select a folder. Return full path."""
pidl = shell.SHBrowseForFolder(hwnd, None, title)[0]
if pidl is None:
# user cancelled
return None
fullpath = shell.SHGetPathFromIDList(pidl)
return fullpath
|
https://github.com/bleachbit/bleachbit/issues/545
|
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 124, in cb_shred_file
File "bleachbit\GuiBasic.pyo", line 94, in browse_files
AttributeError: 'GUI' object has no attribute 'window'
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 135, in cb_shred_folder
File "bleachbit\GuiBasic.pyo", line 43, in browse_folder
AttributeError: 'GUI' object has no attribute 'window'
Traceback (most recent call last):
File "bleachbit\GUI.pyo", line 173, in cb_wipe_free_space
File "bleachbit\GuiBasic.pyo", line 43, in browse_folder
AttributeError: 'GUI' object has no attribute 'window'
```
|
AttributeError
|
def __toggle_callback(self, cell, path):
"""Callback function to toggle option"""
options.toggle(path)
if online_update_notification_enabled:
self.cb_beta.set_sensitive(options.get("check_online_updates"))
if "nt" == os.name:
self.cb_winapp2.set_sensitive(options.get("check_online_updates"))
if "auto_hide" == path:
self.cb_refresh_operations()
|
def __toggle_callback(self, cell, path):
"""Callback function to toggle option"""
options.toggle(path)
if online_update_notification_enabled:
self.cb_beta.set_sensitive(options.get("check_online_updates"))
if "nt" == os.name:
self.cb_winapp2.set_sensitive(options.get("check_online_updates"))
if "auto_hide" == path:
self.cb_refresh_operations()
if "auto_start" == path:
if "nt" == os.name:
swc = Windows.start_with_computer
if "posix" == os.name:
swc = Unix.start_with_computer
try:
swc(options.get(path))
except:
traceback.print_exc()
dlg = Gtk.MessageDialog(
self.parent,
type=Gtk.MessageType.ERROR,
buttons=Gtk.ButtonsType.OK,
message_format=str(sys.exc_info()[1]),
)
dlg.run()
dlg.destroy()
|
https://github.com/bleachbit/bleachbit/issues/314
|
Traceback (most recent call last):
File "/bleachbit/bleachbit/GUI.py", line 186, in cb_preferences_dialog
pref = PreferencesDialog(self._window, self._window.cb_refresh_operations)
File "/bleachbit/bleachbit/GuiPreferences.py", line 65, in __init__
notebook.append_page(self.__general_page(), Gtk.Label(label=_("General")))
File "/bleachbit/bleachbit/GuiPreferences.py", line 107, in __general_page
swcc = Unix.start_with_computer_check
AttributeError: 'module' object has no attribute 'start_with_computer_check'
|
AttributeError
|
def __general_page(self):
"""Return a widget containing the general page"""
vbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
if online_update_notification_enabled:
cb_updates = Gtk.CheckButton.new_with_label(
_("Check periodically for software updates via the Internet")
)
cb_updates.set_active(options.get("check_online_updates"))
cb_updates.connect("toggled", self.__toggle_callback, "check_online_updates")
cb_updates.set_tooltip_text(
_(
"If an update is found, you will be given the option to view information about it. Then, you may manually download and install the update."
)
)
vbox.pack_start(cb_updates, False, True, 0)
updates_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
updates_box.set_border_width(10)
self.cb_beta = Gtk.CheckButton.new_with_label(
label=_("Check for new beta releases")
)
self.cb_beta.set_active(options.get("check_beta"))
self.cb_beta.set_sensitive(options.get("check_online_updates"))
self.cb_beta.connect("toggled", self.__toggle_callback, "check_beta")
updates_box.pack_start(self.cb_beta, False, True, 0)
if "nt" == os.name:
self.cb_winapp2 = Gtk.CheckButton.new_with_label(
_("Download and update cleaners from community (winapp2.ini)")
)
self.cb_winapp2.set_active(options.get("update_winapp2"))
self.cb_winapp2.set_sensitive(options.get("check_online_updates"))
self.cb_winapp2.connect("toggled", self.__toggle_callback, "update_winapp2")
updates_box.pack_start(self.cb_winapp2, False, True, 0)
vbox.pack_start(updates_box, False, True, 0)
# TRANSLATORS: This means to hide cleaners which would do
# nothing. For example, if Firefox were never used on
# this system, this option would hide Firefox to simplify
# the list of cleaners.
cb_auto_hide = Gtk.CheckButton.new_with_label(label=_("Hide irrelevant cleaners"))
cb_auto_hide.set_active(options.get("auto_hide"))
cb_auto_hide.connect("toggled", self.__toggle_callback, "auto_hide")
vbox.pack_start(cb_auto_hide, False, True, 0)
# TRANSLATORS: Overwriting is the same as shredding. It is a way
# to prevent recovery of the data. You could also translate
# 'Shred files to prevent recovery.'
cb_shred = Gtk.CheckButton(_("Overwrite contents of files to prevent recovery"))
cb_shred.set_active(options.get("shred"))
cb_shred.connect("toggled", self.__toggle_callback, "shred")
cb_shred.set_tooltip_text(
_(
"Overwriting is ineffective on some file systems and with certain BleachBit operations. Overwriting is significantly slower."
)
)
vbox.pack_start(cb_shred, False, True, 0)
# Close the application after cleaning is complete.
cb_exit = Gtk.CheckButton.new_with_label(label=_("Exit after cleaning"))
cb_exit.set_active(options.get("exit_done"))
cb_exit.connect("toggled", self.__toggle_callback, "exit_done")
vbox.pack_start(cb_exit, False, True, 0)
# Disable delete confirmation message.
cb_popup = Gtk.CheckButton(label=_("Confirm before delete"))
cb_popup.set_active(options.get("delete_confirmation"))
cb_popup.connect("toggled", self.__toggle_callback, "delete_confirmation")
vbox.pack_start(cb_popup, False, True, 0)
# Use base 1000 over 1024?
cb_units_iec = Gtk.CheckButton(
_("Use IEC sizes (1 KiB = 1024 bytes) instead of SI (1 kB = 1000 bytes)")
)
cb_units_iec.set_active(options.get("units_iec"))
cb_units_iec.connect("toggled", self.__toggle_callback, "units_iec")
vbox.pack_start(cb_units_iec, False, True, 0)
return vbox
|
def __general_page(self):
"""Return a widget containing the general page"""
if "nt" == os.name:
swcc = Windows.start_with_computer_check
if "posix" == os.name:
swcc = Unix.start_with_computer_check
options.set("auto_start", swcc())
vbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
if online_update_notification_enabled:
cb_updates = Gtk.CheckButton.new_with_label(
_("Check periodically for software updates via the Internet")
)
cb_updates.set_active(options.get("check_online_updates"))
cb_updates.connect("toggled", self.__toggle_callback, "check_online_updates")
cb_updates.set_tooltip_text(
_(
"If an update is found, you will be given the option to view information about it. Then, you may manually download and install the update."
)
)
vbox.pack_start(cb_updates, False, True, 0)
updates_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
updates_box.set_border_width(10)
self.cb_beta = Gtk.CheckButton.new_with_label(
label=_("Check for new beta releases")
)
self.cb_beta.set_active(options.get("check_beta"))
self.cb_beta.set_sensitive(options.get("check_online_updates"))
self.cb_beta.connect("toggled", self.__toggle_callback, "check_beta")
updates_box.pack_start(self.cb_beta, False, True, 0)
if "nt" == os.name:
self.cb_winapp2 = Gtk.CheckButton.new_with_label(
_("Download and update cleaners from community (winapp2.ini)")
)
self.cb_winapp2.set_active(options.get("update_winapp2"))
self.cb_winapp2.set_sensitive(options.get("check_online_updates"))
self.cb_winapp2.connect("toggled", self.__toggle_callback, "update_winapp2")
updates_box.pack_start(self.cb_winapp2, False, True, 0)
vbox.pack_start(updates_box, False, True, 0)
# TRANSLATORS: This means to hide cleaners which would do
# nothing. For example, if Firefox were never used on
# this system, this option would hide Firefox to simplify
# the list of cleaners.
cb_auto_hide = Gtk.CheckButton.new_with_label(label=_("Hide irrelevant cleaners"))
cb_auto_hide.set_active(options.get("auto_hide"))
cb_auto_hide.connect("toggled", self.__toggle_callback, "auto_hide")
vbox.pack_start(cb_auto_hide, False, True, 0)
# TRANSLATORS: Overwriting is the same as shredding. It is a way
# to prevent recovery of the data. You could also translate
# 'Shred files to prevent recovery.'
cb_shred = Gtk.CheckButton(_("Overwrite contents of files to prevent recovery"))
cb_shred.set_active(options.get("shred"))
cb_shred.connect("toggled", self.__toggle_callback, "shred")
cb_shred.set_tooltip_text(
_(
"Overwriting is ineffective on some file systems and with certain BleachBit operations. Overwriting is significantly slower."
)
)
vbox.pack_start(cb_shred, False, True, 0)
cb_start = Gtk.CheckButton.new_with_label(label=_("Start BleachBit with computer"))
cb_start.set_active(options.get("auto_start"))
cb_start.connect("toggled", self.__toggle_callback, "auto_start")
vbox.pack_start(cb_start, False, True, 0)
# Close the application after cleaning is complete.
cb_exit = Gtk.CheckButton.new_with_label(label=_("Exit after cleaning"))
cb_exit.set_active(options.get("exit_done"))
cb_exit.connect("toggled", self.__toggle_callback, "exit_done")
vbox.pack_start(cb_exit, False, True, 0)
# Disable delete confirmation message.
cb_popup = Gtk.CheckButton(label=_("Confirm before delete"))
cb_popup.set_active(options.get("delete_confirmation"))
cb_popup.connect("toggled", self.__toggle_callback, "delete_confirmation")
vbox.pack_start(cb_popup, False, True, 0)
# Use base 1000 over 1024?
cb_units_iec = Gtk.CheckButton(
_("Use IEC sizes (1 KiB = 1024 bytes) instead of SI (1 kB = 1000 bytes)")
)
cb_units_iec.set_active(options.get("units_iec"))
cb_units_iec.connect("toggled", self.__toggle_callback, "units_iec")
vbox.pack_start(cb_units_iec, False, True, 0)
return vbox
|
https://github.com/bleachbit/bleachbit/issues/314
|
Traceback (most recent call last):
File "/bleachbit/bleachbit/GUI.py", line 186, in cb_preferences_dialog
pref = PreferencesDialog(self._window, self._window.cb_refresh_operations)
File "/bleachbit/bleachbit/GuiPreferences.py", line 65, in __init__
notebook.append_page(self.__general_page(), Gtk.Label(label=_("General")))
File "/bleachbit/bleachbit/GuiPreferences.py", line 107, in __general_page
swcc = Unix.start_with_computer_check
AttributeError: 'module' object has no attribute 'start_with_computer_check'
|
AttributeError
|
def restore(self):
"""Restore saved options from disk"""
try:
self.config.read(bleachbit.options_file)
except:
traceback.print_exc()
if not self.config.has_section("bleachbit"):
self.config.add_section("bleachbit")
if not self.config.has_section("hashpath"):
self.config.add_section("hashpath")
if not self.config.has_section("list/shred_drives"):
from bleachbit.FileUtilities import guess_overwrite_paths
try:
self.set_list("shred_drives", guess_overwrite_paths())
except:
traceback.print_exc()
logger.error("error setting default shred drives")
# set defaults
self.__set_default("auto_hide", True)
self.__set_default("check_beta", False)
self.__set_default("check_online_updates", True)
self.__set_default("shred", False)
self.__set_default("exit_done", False)
self.__set_default("delete_confirmation", True)
self.__set_default("units_iec", False)
if "nt" == os.name:
self.__set_default("update_winapp2", False)
if not self.config.has_section("preserve_languages"):
lang = bleachbit.user_locale
pos = lang.find("_")
if -1 != pos:
lang = lang[0:pos]
for _lang in set([lang, "en"]):
logger.info("automatically preserving language '%s'", lang)
self.set_language(_lang, True)
# BleachBit upgrade or first start ever
if (
not self.config.has_option("bleachbit", "version")
or self.get("version") != bleachbit.APP_VERSION
):
self.set("first_start", True)
# set version
self.set("version", bleachbit.APP_VERSION)
|
def restore(self):
"""Restore saved options from disk"""
try:
self.config.read(bleachbit.options_file)
except:
traceback.print_exc()
if not self.config.has_section("bleachbit"):
self.config.add_section("bleachbit")
if not self.config.has_section("hashpath"):
self.config.add_section("hashpath")
if not self.config.has_section("list/shred_drives"):
from bleachbit.FileUtilities import guess_overwrite_paths
try:
self.set_list("shred_drives", guess_overwrite_paths())
except:
traceback.print_exc()
logger.error("error setting default shred drives")
# set defaults
self.__set_default("auto_hide", True)
self.__set_default("auto_start", False)
self.__set_default("check_beta", False)
self.__set_default("check_online_updates", True)
self.__set_default("shred", False)
self.__set_default("exit_done", False)
self.__set_default("delete_confirmation", True)
self.__set_default("units_iec", False)
if "nt" == os.name:
self.__set_default("update_winapp2", False)
if not self.config.has_section("preserve_languages"):
lang = bleachbit.user_locale
pos = lang.find("_")
if -1 != pos:
lang = lang[0:pos]
for _lang in set([lang, "en"]):
logger.info("automatically preserving language '%s'", lang)
self.set_language(_lang, True)
# BleachBit upgrade or first start ever
if (
not self.config.has_option("bleachbit", "version")
or self.get("version") != bleachbit.APP_VERSION
):
self.set("first_start", True)
# set version
self.set("version", bleachbit.APP_VERSION)
|
https://github.com/bleachbit/bleachbit/issues/314
|
Traceback (most recent call last):
File "/bleachbit/bleachbit/GUI.py", line 186, in cb_preferences_dialog
pref = PreferencesDialog(self._window, self._window.cb_refresh_operations)
File "/bleachbit/bleachbit/GuiPreferences.py", line 65, in __init__
notebook.append_page(self.__general_page(), Gtk.Label(label=_("General")))
File "/bleachbit/bleachbit/GuiPreferences.py", line 107, in __general_page
swcc = Unix.start_with_computer_check
AttributeError: 'module' object has no attribute 'start_with_computer_check'
|
AttributeError
|
def __make_file_provider(self, dirname, filename, recurse, removeself, excludekeys):
"""Change parsed FileKey to action provider"""
regex = ""
if recurse:
search = "walk.files"
path = dirname
if filename.startswith("*."):
filename = filename.replace("*.", ".")
if filename == ".*":
if removeself:
search = "walk.all"
else:
import fnmatch
regex = ' regex="%s" ' % (fnmatch.translate(filename))
else:
search = "glob"
path = os.path.join(dirname, filename)
if path.find("*") == -1:
search = "file"
excludekeysxml = ""
if excludekeys:
if len(excludekeys) > 1:
# multiple
exclude_str = "(%s)" % "|".join(excludekeys)
else:
# just one
exclude_str = excludekeys[0]
excludekeysxml = 'nwholeregex="%s"' % xml_escape(exclude_str)
action_str = '<option command="delete" search="%s" path="%s" %s %s/>' % (
search,
xml_escape(path),
regex,
excludekeysxml,
)
yield Delete(parseString(action_str).childNodes[0])
if removeself:
action_str = '<option command="delete" search="file" path="%s"/>' % (
xml_escape(dirname)
)
yield Delete(parseString(action_str).childNodes[0])
|
def __make_file_provider(self, dirname, filename, recurse, removeself, excludekeys):
"""Change parsed FileKey to action provider"""
regex = ""
if recurse:
search = "walk.files"
path = dirname
if filename.startswith("*."):
filename = filename.replace("*.", ".")
if filename == ".*":
if removeself:
search = "walk.all"
else:
import fnmatch
regex = ' regex="%s" ' % (fnmatch.translate(filename))
else:
search = "glob"
path = os.path.join(dirname, filename)
if path.find("*") == -1:
search = "file"
excludekeysxml = ""
if excludekeys:
if len(excludekeys) > 1:
# multiple
exclude_str = "(%s)" % "|".join(excludekeys)
else:
# just one
exclude_str = excludekeys[0]
excludekeysxml = 'nwholeregex="%s"' % exclude_str
action_str = '<option command="delete" search="%s" path="%s" %s %s/>' % (
search,
xml_escape(path),
regex,
excludekeysxml,
)
yield Delete(parseString(action_str).childNodes[0])
if removeself:
action_str = '<option command="delete" search="file" path="%s"/>' % (
xml_escape(dirname)
)
yield Delete(parseString(action_str).childNodes[0])
|
https://github.com/bleachbit/bleachbit/issues/308
|
parsing error in section Spybot Search and Destroy Updates
Traceback (most recent call last):
File "bleachbit\Winapp.pyo", line 151, in __init__
File "bleachbit\Winapp.pyo", line 285, in handle_section
File "bleachbit\Winapp.pyo", line 357, in handle_filekey
File "bleachbit\Winapp.pyo", line 329, in __make_file_provider
File "xml\dom\minidom.pyo", line 1928, in parseString
File "xml\dom\expatbuilder.pyo", line 940, in parseString
File "xml\dom\expatbuilder.pyo", line 223, in parseString
ExpatError: not well-formed (invalid token): line 1, column 222
|
ExpatError
|
def start_with_computer(enabled):
"""If enabled, create shortcut to start application with computer.
If disabled, then delete the shortcut."""
if not enabled:
# User requests to not automatically start BleachBit
if os.path.lexists(bleachbit.autostart_path):
# Delete the shortcut
FileUtilities.delete(bleachbit.autostart_path)
return
# User requests to automatically start BleachBit
if os.path.lexists(bleachbit.autostart_path):
# Already automatic, so exit
return
if not os.path.exists(bleachbit.launcher_path):
logger.error("%s does not exist: ", bleachbit.launcher_path)
return
autostart_dir = os.path.dirname(bleachbit.autostart_path)
if not os.path.exists(autostart_dir):
General.makedirs(autostart_dir)
import shutil
shutil.copy(bleachbit.launcher_path, bleachbit.autostart_path)
os.chmod(bleachbit.autostart_path, 0o755)
if General.sudo_mode():
General.chownself(bleachbit.autostart_path)
|
def start_with_computer(enabled):
"""If enabled, create shortcut to start application with computer.
If disabled, then delete the shortcut."""
if not enabled:
# User requests to not automatically start BleachBit
if os.path.lexists(bleachbit.autostart_path):
# Delete the shortcut
FileUtilities.delete(bleachbit.autostart_path)
return
# User requests to automatically start BleachBit
if os.path.lexists(bleachbit.autostart_path):
# Already automatic, so exit
return
if not os.path.exists(bleachbit.launcher_path):
logger.error("%s does not exist: ", bleachbit.launcher_path)
return
import shutil
General.makedirs(os.path.dirname(bleachbit.autostart_path))
shutil.copy(bleachbit.launcher_path, bleachbit.autostart_path)
os.chmod(bleachbit.autostart_path, 0o755)
if General.sudo_mode():
General.chownself(bleachbit.autostart_path)
|
https://github.com/bleachbit/bleachbit/issues/231
|
FAIL: test_start_with_computer (tests.TestUnix.UnixTestCase)
Unit test for start_with_computer*
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/travis/build/bleachbit/bleachbit/tests/TestUnix.py", line 216, in test_start_with_computer
self.assertNotEqual(b, two_b)
AssertionError: False == False
|
AssertionError
|
def updateStartOnLogon(self):
"""
Configure Bitmessage to start on startup (or remove the
configuration) based on the setting in the keys.dat file
"""
startonlogon = BMConfigParser().safeGetBoolean("bitmessagesettings", "startonlogon")
if sys.platform.startswith("win"): # Auto-startup for Windows
RUN_PATH = (
"HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Run"
)
settings = QtCore.QSettings(RUN_PATH, QtCore.QSettings.NativeFormat)
# In case the user moves the program and the registry entry is
# no longer valid, this will delete the old registry entry.
if startonlogon:
settings.setValue("PyBitmessage", sys.argv[0])
else:
settings.remove("PyBitmessage")
else:
try: # get desktop plugin if any
self.desktop = get_plugin("desktop")()
self.desktop.adjust_startonlogon(startonlogon)
except (NameError, TypeError):
self.desktop = False
|
def updateStartOnLogon(self):
"""
Configure Bitmessage to start on startup (or remove the
configuration) based on the setting in the keys.dat file
"""
startonlogon = BMConfigParser().safeGetBoolean("bitmessagesettings", "startonlogon")
if "win32" in sys.platform or "win64" in sys.platform:
# Auto-startup for Windows
RUN_PATH = (
"HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Run"
)
settings = QtCore.QSettings(RUN_PATH, QtCore.QSettings.NativeFormat)
# In case the user moves the program and the registry entry is
# no longer valid, this will delete the old registry entry.
if startonlogon:
settings.setValue("PyBitmessage", sys.argv[0])
else:
settings.remove("PyBitmessage")
elif self.desktop:
self.desktop.adjust_startonlogon(startonlogon)
|
https://github.com/Bitmessage/PyBitmessage/issues/1735
|
$ Documents/Software/PyBitmessage/src/bitmessagemain.py
2021-02-13 20:05:18,826 - WARNING - Using default logger configuration
2021-02-13 20:05:23,429 - WARNING - /home/*****/.namecoin/namecoin.conf unreadable or missing, Namecoin support deactivated
2021-02-13 20:05:23,435 - WARNING - There was a problem testing for a Namecoin daemon. Hiding the Fetch Namecoin ID button
2021-02-13 20:05:23,436 - CRITICAL - Unhandled exception
Traceback (most recent call last):
File "Documents/Software/PyBitmessage/src/bitmessagemain.py", line 487, in <module>
File "Documents/Software/PyBitmessage/src/bitmessagemain.py", line 483, in main
File "Documents/Software/PyBitmessage/src/bitmessagemain.py", line 356, in start
File "/home/*****/Documents/Software/PyBitmessage/src/bitmessageqt/__init__.py", line 4188, in run
myapp = MyForm()
File "/home/*****/Documents/Software/PyBitmessage/src/bitmessageqt/__init__.py", line 825, in __init__
self.indicatorInit()
File "/home/*****/Documents/Software/PyBitmessage/src/bitmessageqt/__init__.py", line 1435, in indicatorInit
self.desktop = get_plugin('desktop')()
NameError: global name 'get_plugin' is not defined
|
NameError
|
def indicatorInit(self):
"""
Try init the distro specific appindicator,
for example the Ubuntu MessagingMenu
"""
def _noop_update(*args, **kwargs):
pass
try:
self.indicatorUpdate = get_plugin("indicator")(self)
except (NameError, TypeError):
logger.warning("No indicator plugin found")
self.indicatorUpdate = _noop_update
|
def indicatorInit(self):
"""
Try init the distro specific appindicator,
for example the Ubuntu MessagingMenu
"""
def _noop_update(*args, **kwargs):
pass
# get desktop plugin if any
if "win" not in sys.platform:
try:
self.desktop = get_plugin("desktop")()
except TypeError:
self.desktop = False
try:
self.indicatorUpdate = get_plugin("indicator")(self)
except (NameError, TypeError):
logger.warning("No indicator plugin found")
self.indicatorUpdate = _noop_update
|
https://github.com/Bitmessage/PyBitmessage/issues/1735
|
$ Documents/Software/PyBitmessage/src/bitmessagemain.py
2021-02-13 20:05:18,826 - WARNING - Using default logger configuration
2021-02-13 20:05:23,429 - WARNING - /home/*****/.namecoin/namecoin.conf unreadable or missing, Namecoin support deactivated
2021-02-13 20:05:23,435 - WARNING - There was a problem testing for a Namecoin daemon. Hiding the Fetch Namecoin ID button
2021-02-13 20:05:23,436 - CRITICAL - Unhandled exception
Traceback (most recent call last):
File "Documents/Software/PyBitmessage/src/bitmessagemain.py", line 487, in <module>
File "Documents/Software/PyBitmessage/src/bitmessagemain.py", line 483, in main
File "Documents/Software/PyBitmessage/src/bitmessagemain.py", line 356, in start
File "/home/*****/Documents/Software/PyBitmessage/src/bitmessageqt/__init__.py", line 4188, in run
myapp = MyForm()
File "/home/*****/Documents/Software/PyBitmessage/src/bitmessageqt/__init__.py", line 825, in __init__
self.indicatorInit()
File "/home/*****/Documents/Software/PyBitmessage/src/bitmessageqt/__init__.py", line 1435, in indicatorInit
self.desktop = get_plugin('desktop')()
NameError: global name 'get_plugin' is not defined
|
NameError
|
def __init__(self):
threading.Thread.__init__(self, name="objectProcessor")
random.seed()
# It may be the case that the last time Bitmessage was running,
# the user closed it before it finished processing everything in the
# objectProcessorQueue. Assuming that Bitmessage wasn't closed
# forcefully, it should have saved the data in the queue into the
# objectprocessorqueue table. Let's pull it out.
sql_ready.wait()
queryreturn = sqlQuery("""SELECT objecttype, data FROM objectprocessorqueue""")
for row in queryreturn:
objectType, data = row
queues.objectProcessorQueue.put((objectType, data))
sqlExecute("""DELETE FROM objectprocessorqueue""")
logger.debug(
"Loaded %s objects from disk into the objectProcessorQueue.", len(queryreturn)
)
self._ack_obj = bmproto.BMStringParser()
self.successfullyDecryptMessageTimings = []
|
def __init__(self):
threading.Thread.__init__(self, name="objectProcessor")
random.seed()
# It may be the case that the last time Bitmessage was running,
# the user closed it before it finished processing everything in the
# objectProcessorQueue. Assuming that Bitmessage wasn't closed
# forcefully, it should have saved the data in the queue into the
# objectprocessorqueue table. Let's pull it out.
queryreturn = sqlQuery("""SELECT objecttype, data FROM objectprocessorqueue""")
for row in queryreturn:
objectType, data = row
queues.objectProcessorQueue.put((objectType, data))
sqlExecute("""DELETE FROM objectprocessorqueue""")
logger.debug(
"Loaded %s objects from disk into the objectProcessorQueue.", len(queryreturn)
)
self._ack_obj = bmproto.BMStringParser()
self.successfullyDecryptMessageTimings = []
|
https://github.com/Bitmessage/PyBitmessage/issues/1702
|
2021-01-03 06:30:31,908 - CRITICAL - Unhandled exception
Traceback (most recent call last):
File "Documents/Software/PyBitmessage/src/bitmessagemain.py", line 491, in <module>
File "Documents/Software/PyBitmessage/src/bitmessagemain.py", line 487, in main
File "Documents/Software/PyBitmessage/src/bitmessagemain.py", line 286, in start
File "/home/*****/Documents/Software/PyBitmessage/src/class_objectProcessor.py", line 54, in __init__
'''SELECT objecttype, data FROM objectprocessorqueue''')
File "/home/*****/Documents/Software/PyBitmessage/src/helper_sql.py", line 44, in sqlQuery
assert sql_available
AssertionError
Number of threads: 1
Operating system is Ubuntu 18.04.5 LTS 32-bit.
|
AssertionError
|
def handlech(c, stdscr):
if c != curses.ERR:
global inboxcur, addrcur, sentcur, subcur, abookcur, blackcur
if c in range(256):
if chr(c) in "12345678":
global menutab
menutab = int(chr(c))
elif chr(c) == "q":
global quit
quit = True
elif chr(c) == "\n":
curses.curs_set(1)
d = Dialog(dialog="dialog")
if menutab == 1:
set_background_title(d, "Inbox Message Dialog Box")
r, t = d.menu(
'Do what with "'
+ inbox[inboxcur][5]
+ '" from "'
+ inbox[inboxcur][3]
+ '"?',
choices=[
("1", "View message"),
("2", "Mark message as unread"),
("3", "Reply"),
("4", "Add sender to Address Book"),
("5", "Save message as text file"),
("6", "Move to trash"),
],
)
if r == d.DIALOG_OK:
if t == "1": # View
set_background_title(
d,
'"'
+ inbox[inboxcur][5]
+ '" from "'
+ inbox[inboxcur][3]
+ '" to "'
+ inbox[inboxcur][1]
+ '"',
)
data = ""
ret = sqlQuery(
"SELECT message FROM inbox WHERE msgid=?",
inbox[inboxcur][0],
)
if ret != []:
for row in ret:
(data,) = row
data = shared.fixPotentiallyInvalidUTF8Data(data)
msg = ""
for i, item in enumerate(data.split("\n")):
msg += fill(item, replace_whitespace=False) + "\n"
scrollbox(d, unicode(ascii(msg)), 30, 80)
sqlExecute(
"UPDATE inbox SET read=1 WHERE msgid=?",
inbox[inboxcur][0],
)
inbox[inboxcur][7] = 1
else:
scrollbox(d, unicode("Could not fetch message."))
elif t == "2": # Mark unread
sqlExecute(
"UPDATE inbox SET read=0 WHERE msgid=?",
inbox[inboxcur][0],
)
inbox[inboxcur][7] = 0
elif t == "3": # Reply
curses.curs_set(1)
m = inbox[inboxcur]
fromaddr = m[4]
ischan = False
for i, item in enumerate(addresses):
if fromaddr == item[2] and item[3] != 0:
ischan = True
break
if not addresses[i][1]:
scrollbox(
d,
unicode(
"Sending address disabled, please either enable it or choose a different address."
),
)
return
toaddr = m[2]
if ischan:
toaddr = fromaddr
subject = m[5]
if not m[5][:4] == "Re: ":
subject = "Re: " + m[5]
body = ""
ret = sqlQuery(
"SELECT message FROM inbox WHERE msgid=?", m[0]
)
if ret != []:
body = "\n\n------------------------------------------------------\n"
for row in ret:
(body,) = row
sendMessage(fromaddr, toaddr, ischan, subject, body, True)
dialogreset(stdscr)
elif t == "4": # Add to Address Book
global addrbook
addr = inbox[inboxcur][4]
if addr not in [item[1] for i, item in enumerate(addrbook)]:
r, t = d.inputbox('Label for address "' + addr + '"')
if r == d.DIALOG_OK:
label = t
sqlExecute(
"INSERT INTO addressbook VALUES (?,?)",
label,
addr,
)
# Prepend entry
addrbook.reverse()
addrbook.append([label, addr])
addrbook.reverse()
else:
scrollbox(
d,
unicode(
"The selected address is already in the Address Book."
),
)
elif t == "5": # Save message
set_background_title(
d, 'Save "' + inbox[inboxcur][5] + '" as text file'
)
r, t = d.inputbox(
"Filename", init=inbox[inboxcur][5] + ".txt"
)
if r == d.DIALOG_OK:
msg = ""
ret = sqlQuery(
"SELECT message FROM inbox WHERE msgid=?",
inbox[inboxcur][0],
)
if ret != []:
for row in ret:
(msg,) = row
fh = open(
t, "a"
) # Open in append mode just in case
fh.write(msg)
fh.close()
else:
scrollbox(d, unicode("Could not fetch message."))
elif t == "6": # Move to trash
sqlExecute(
"UPDATE inbox SET folder='trash' WHERE msgid=?",
inbox[inboxcur][0],
)
del inbox[inboxcur]
scrollbox(
d,
unicode(
"Message moved to trash. There is no interface to view your trash, \nbut the message is still on disk if you are desperate to recover it."
),
)
elif menutab == 2:
a = ""
if addresses[addrcur][3] != 0: # if current address is a chan
a = addresses[addrcur][2]
sendMessage(addresses[addrcur][2], a)
elif menutab == 3:
set_background_title(d, "Sent Messages Dialog Box")
r, t = d.menu(
'Do what with "'
+ sentbox[sentcur][4]
+ '" to "'
+ sentbox[sentcur][0]
+ '"?',
choices=[("1", "View message"), ("2", "Move to trash")],
)
if r == d.DIALOG_OK:
if t == "1": # View
set_background_title(
d,
'"'
+ sentbox[sentcur][4]
+ '" from "'
+ sentbox[sentcur][3]
+ '" to "'
+ sentbox[sentcur][1]
+ '"',
)
data = ""
ret = sqlQuery(
"SELECT message FROM sent WHERE subject=? AND ackdata=?",
sentbox[sentcur][4],
sentbox[sentcur][6],
)
if ret != []:
for row in ret:
(data,) = row
data = shared.fixPotentiallyInvalidUTF8Data(data)
msg = ""
for i, item in enumerate(data.split("\n")):
msg += fill(item, replace_whitespace=False) + "\n"
scrollbox(d, unicode(ascii(msg)), 30, 80)
else:
scrollbox(d, unicode("Could not fetch message."))
elif t == "2": # Move to trash
sqlExecute(
"UPDATE sent SET folder='trash' WHERE subject=? AND ackdata=?",
sentbox[sentcur][4],
sentbox[sentcur][6],
)
del sentbox[sentcur]
scrollbox(
d,
unicode(
"Message moved to trash. There is no interface to view your trash, \nbut the message is still on disk if you are desperate to recover it."
),
)
elif menutab == 4:
set_background_title(d, "Your Identities Dialog Box")
if len(addresses) <= addrcur:
r, t = d.menu(
"Do what with addresses?",
choices=[("1", "Create new address")],
)
else:
r, t = d.menu(
'Do what with "'
+ addresses[addrcur][0]
+ '" : "'
+ addresses[addrcur][2]
+ '"?',
choices=[
("1", "Create new address"),
("2", "Send a message from this address"),
("3", "Rename"),
("4", "Enable"),
("5", "Disable"),
("6", "Delete"),
("7", "Special address behavior"),
],
)
if r == d.DIALOG_OK:
if t == "1": # Create new address
set_background_title(d, "Create new address")
scrollbox(
d,
unicode(
"Here you may generate as many addresses as you like.\n"
"Indeed, creating and abandoning addresses is encouraged.\n"
"Deterministic addresses have several pros and cons:\n"
"\nPros:\n"
" * You can recreate your addresses on any computer from memory\n"
" * You need not worry about backing up your keys.dat file as long as you \n can remember your passphrase\n"
"Cons:\n"
" * You must remember (or write down) your passphrase in order to recreate \n your keys if they are lost\n"
" * You must also remember the address version and stream numbers\n"
" * If you choose a weak passphrase someone may be able to brute-force it \n and then send and receive messages as you"
),
)
r, t = d.menu(
"Choose an address generation technique",
choices=[
("1", "Use a random number generator"),
("2", "Use a passphrase"),
],
)
if r == d.DIALOG_OK:
if t == "1":
set_background_title(d, "Randomly generate address")
r, t = d.inputbox(
"Label (not shown to anyone except you)"
)
label = ""
if r == d.DIALOG_OK and len(t) > 0:
label = t
r, t = d.menu(
"Choose a stream",
choices=[
("1", "Use the most available stream"),
(
"",
"(Best if this is the first of many addresses you will create)",
),
(
"2",
"Use the same stream as an existing address",
),
(
"",
"(Saves you some bandwidth and processing power)",
),
],
)
if r == d.DIALOG_OK:
if t == "1":
stream = 1
elif t == "2":
addrs = []
for i, item in enumerate(addresses):
addrs.append([str(i), item[2]])
r, t = d.menu(
"Choose an existing address's stream",
choices=addrs,
)
if r == d.DIALOG_OK:
stream = decodeAddress(
addrs[int(t)][1]
)[2]
shorten = False
r, t = d.checklist(
"Miscellaneous options",
choices=[
(
"1",
"Spend time shortening the address",
1 if shorten else 0,
)
],
)
if r == d.DIALOG_OK and "1" in t:
shorten = True
shared.addressGeneratorQueue.put(
(
"createRandomAddress",
4,
stream,
label,
1,
"",
shorten,
)
)
elif t == "2":
set_background_title(
d, "Make deterministic addresses"
)
r, t = d.passwordform(
"Enter passphrase",
[
("Passphrase", 1, 1, "", 2, 1, 64, 128),
(
"Confirm passphrase",
3,
1,
"",
4,
1,
64,
128,
),
],
form_height=4,
insecure=True,
)
if r == d.DIALOG_OK:
if t[0] == t[1]:
passphrase = t[0]
r, t = d.rangebox(
"Number of addresses to generate",
width=48,
min=1,
max=99,
init=8,
)
if r == d.DIALOG_OK:
number = t
stream = 1
shorten = False
r, t = d.checklist(
"Miscellaneous options",
choices=[
(
"1",
"Spend time shortening the address",
1 if shorten else 0,
)
],
)
if r == d.DIALOG_OK and "1" in t:
shorten = True
scrollbox(
d,
unicode(
"In addition to your passphrase, be sure to remember the following numbers:\n"
"\n * Address version number: "
+ str(4)
+ "\n"
" * Stream number: "
+ str(stream)
),
)
shared.addressGeneratorQueue.put(
(
"createDeterministicAddresses",
4,
stream,
"unused deterministic address",
number,
str(passphrase),
shorten,
)
)
else:
scrollbox(
d, unicode("Passphrases do not match")
)
elif t == "2": # Send a message
a = ""
if (
addresses[addrcur][3] != 0
): # if current address is a chan
a = addresses[addrcur][2]
sendMessage(addresses[addrcur][2], a)
elif t == "3": # Rename address label
a = addresses[addrcur][2]
label = addresses[addrcur][0]
r, t = d.inputbox("New address label", init=label)
if r == d.DIALOG_OK:
label = t
shared.config.set(a, "label", label)
# Write config
shared.writeKeysFile()
addresses[addrcur][0] = label
elif t == "4": # Enable address
a = addresses[addrcur][2]
shared.config.set(a, "enabled", "true") # Set config
# Write config
shared.writeKeysFile()
# Change color
if shared.safeConfigGetBoolean(a, "chan"):
addresses[addrcur][3] = 9 # orange
elif shared.safeConfigGetBoolean(a, "mailinglist"):
addresses[addrcur][3] = 5 # magenta
else:
addresses[addrcur][3] = 0 # black
addresses[addrcur][1] = True
shared.reloadMyAddressHashes() # Reload address hashes
elif t == "5": # Disable address
a = addresses[addrcur][2]
shared.config.set(a, "enabled", "false") # Set config
addresses[addrcur][3] = 8 # Set color to gray
# Write config
shared.writeKeysFile()
addresses[addrcur][1] = False
shared.reloadMyAddressHashes() # Reload address hashes
elif t == "6": # Delete address
r, t = d.inputbox(
'Type in "I want to delete this address"', width=50
)
if (
r == d.DIALOG_OK
and t == "I want to delete this address"
):
shared.config.remove_section(addresses[addrcur][2])
shared.writeKeysFile()
del addresses[addrcur]
elif t == "7": # Special address behavior
a = addresses[addrcur][2]
set_background_title(d, "Special address behavior")
if shared.safeConfigGetBoolean(a, "chan"):
scrollbox(
d,
unicode(
"This is a chan address. You cannot use it as a pseudo-mailing list."
),
)
else:
m = shared.safeConfigGetBoolean(a, "mailinglist")
r, t = d.radiolist(
"Select address behavior",
choices=[
("1", "Behave as a normal address", not m),
(
"2",
"Behave as a pseudo-mailing-list address",
m,
),
],
)
if r == d.DIALOG_OK:
if t == "1" and m == True:
shared.config.set(a, "mailinglist", "false")
if addresses[addrcur][1]:
addresses[addrcur][3] = (
0 # Set color to black
)
else:
addresses[addrcur][3] = (
8 # Set color to gray
)
elif t == "2" and m == False:
try:
mn = shared.config.get(a, "mailinglistname")
except ConfigParser.NoOptionError:
mn = ""
r, t = d.inputbox("Mailing list name", init=mn)
if r == d.DIALOG_OK:
mn = t
shared.config.set(a, "mailinglist", "true")
shared.config.set(a, "mailinglistname", mn)
addresses[addrcur][3] = (
6 # Set color to magenta
)
# Write config
shared.writeKeysFile()
elif menutab == 5:
set_background_title(d, "Subscriptions Dialog Box")
if len(subscriptions) <= subcur:
r, t = d.menu(
'Do what with subscription to "'
+ subscriptions[subcur][0]
+ '"?',
choices=[("1", "Add new subscription")],
)
else:
r, t = d.menu(
'Do what with subscription to "'
+ subscriptions[subcur][0]
+ '"?',
choices=[
("1", "Add new subscription"),
("2", "Delete this subscription"),
("3", "Enable"),
("4", "Disable"),
],
)
if r == d.DIALOG_OK:
if t == "1":
r, t = d.inputbox("New subscription address")
if r == d.DIALOG_OK:
addr = addBMIfNotPresent(t)
if not shared.isAddressInMySubscriptionsList(addr):
r, t = d.inputbox("New subscription label")
if r == d.DIALOG_OK:
label = t
# Prepend entry
subscriptions.reverse()
subscriptions.append([label, addr, True])
subscriptions.reverse()
sqlExecute(
"INSERT INTO subscriptions VALUES (?,?,?)",
label,
address,
True,
)
shared.reloadBroadcastSendersForWhichImWatching()
elif t == "2":
r, t = d.inpuxbox(
'Type in "I want to delete this subscription"'
)
if (
r == d.DIALOG_OK
and t == "I want to delete this subscription"
):
sqlExecute(
"DELETE FROM subscriptions WHERE label=? AND address=?",
subscriptions[subcur][0],
subscriptions[subcur][1],
)
shared.reloadBroadcastSendersForWhichImWatching()
del subscriptions[subcur]
elif t == "3":
sqlExecute(
"UPDATE subscriptions SET enabled=1 WHERE label=? AND address=?",
subscriptions[subcur][0],
subscriptions[subcur][1],
)
shared.reloadBroadcastSendersForWhichImWatching()
subscriptions[subcur][2] = True
elif t == "4":
sqlExecute(
"UPDATE subscriptions SET enabled=0 WHERE label=? AND address=?",
subscriptions[subcur][0],
subscriptions[subcur][1],
)
shared.reloadBroadcastSendersForWhichImWatching()
subscriptions[subcur][2] = False
elif menutab == 6:
set_background_title(d, "Address Book Dialog Box")
if len(addrbook) <= abookcur:
r, t = d.menu(
"Do what with addressbook?",
choices=[("3", "Add new address to Address Book")],
)
else:
r, t = d.menu(
'Do what with "'
+ addrbook[abookcur][0]
+ '" : "'
+ addrbook[abookcur][1]
+ '"',
choices=[
("1", "Send a message to this address"),
("2", "Subscribe to this address"),
("3", "Add new address to Address Book"),
("4", "Delete this address"),
],
)
if r == d.DIALOG_OK:
if t == "1":
sendMessage(recv=addrbook[abookcur][1])
elif t == "2":
r, t = d.inputbox("New subscription label")
if r == d.DIALOG_OK:
label = t
# Prepend entry
subscriptions.reverse()
subscriptions.append([label, addr, True])
subscriptions.reverse()
sqlExecute(
"INSERT INTO subscriptions VALUES (?,?,?)",
label,
address,
True,
)
shared.reloadBroadcastSendersForWhichImWatching()
elif t == "3":
r, t = d.inputbox("Input new address")
if r == d.DIALOG_OK:
addr = t
if addr not in [
item[1] for i, item in enumerate(addrbook)
]:
r, t = d.inputbox(
'Label for address "' + addr + '"'
)
if r == d.DIALOG_OK:
sqlExecute(
"INSERT INTO addressbook VALUES (?,?)",
t,
addr,
)
# Prepend entry
addrbook.reverse()
addrbook.append([t, addr])
addrbook.reverse()
else:
scrollbox(
d,
unicode(
"The selected address is already in the Address Book."
),
)
elif t == "4":
r, t = d.inputbox(
'Type in "I want to delete this Address Book entry"'
)
if (
r == d.DIALOG_OK
and t == "I want to delete this Address Book entry"
):
sqlExecute(
"DELETE FROM addressbook WHERE label=? AND address=?",
addrbook[abookcur][0],
addrbook[abookcur][1],
)
del addrbook[abookcur]
elif menutab == 7:
set_background_title(d, "Blacklist Dialog Box")
r, t = d.menu(
'Do what with "'
+ blacklist[blackcur][0]
+ '" : "'
+ blacklist[blackcur][1]
+ '"?',
choices=[("1", "Delete"), ("2", "Enable"), ("3", "Disable")],
)
if r == d.DIALOG_OK:
if t == "1":
r, t = d.inputbox(
'Type in "I want to delete this Blacklist entry"'
)
if (
r == d.DIALOG_OK
and t == "I want to delete this Blacklist entry"
):
sqlExecute(
"DELETE FROM blacklist WHERE label=? AND address=?",
blacklist[blackcur][0],
blacklist[blackcur][1],
)
del blacklist[blackcur]
elif t == "2":
sqlExecute(
"UPDATE blacklist SET enabled=1 WHERE label=? AND address=?",
blacklist[blackcur][0],
blacklist[blackcur][1],
)
blacklist[blackcur][2] = True
elif t == "3":
sqlExecute(
"UPDATE blacklist SET enabled=0 WHERE label=? AND address=?",
blacklist[blackcur][0],
blacklist[blackcur][1],
)
blacklist[blackcur][2] = False
dialogreset(stdscr)
else:
if c == curses.KEY_UP:
if menutab == 1 and inboxcur > 0:
inboxcur -= 1
if (menutab == 2 or menutab == 4) and addrcur > 0:
addrcur -= 1
if menutab == 3 and sentcur > 0:
sentcur -= 1
if menutab == 5 and subcur > 0:
subcur -= 1
if menutab == 6 and abookcur > 0:
abookcur -= 1
if menutab == 7 and blackcur > 0:
blackcur -= 1
elif c == curses.KEY_DOWN:
if menutab == 1 and inboxcur < len(inbox) - 1:
inboxcur += 1
if (menutab == 2 or menutab == 4) and addrcur < len(addresses) - 1:
addrcur += 1
if menutab == 3 and sentcur < len(sentbox) - 1:
sentcur += 1
if menutab == 5 and subcur < len(subscriptions) - 1:
subcur += 1
if menutab == 6 and abookcur < len(addrbook) - 1:
abookcur += 1
if menutab == 7 and blackcur < len(blacklist) - 1:
blackcur += 1
elif c == curses.KEY_HOME:
if menutab == 1:
inboxcur = 0
if menutab == 2 or menutab == 4:
addrcur = 0
if menutab == 3:
sentcur = 0
if menutab == 5:
subcur = 0
if menutab == 6:
abookcur = 0
if menutab == 7:
blackcur = 0
elif c == curses.KEY_END:
if menutab == 1:
inboxcur = len(inbox) - 1
if menutab == 2 or menutab == 4:
addrcur = len(addresses) - 1
if menutab == 3:
sentcur = len(sentbox) - 1
if menutab == 5:
subcur = len(subscriptions) - 1
if menutab == 6:
abookcur = len(addrbook) - 1
if menutab == 7:
blackcur = len(blackcur) - 1
redraw(stdscr)
|
def handlech(c, stdscr):
if c != curses.ERR:
global inboxcur, addrcur, sentcur, subcur, abookcur, blackcur
if c in range(256):
if chr(c) in "12345678":
global menutab
menutab = int(chr(c))
elif chr(c) == "q":
global quit
quit = True
elif chr(c) == "\n":
curses.curs_set(1)
d = Dialog(dialog="dialog")
if menutab == 1:
d.set_background_title("Inbox Message Dialog Box")
r, t = d.menu(
'Do what with "'
+ inbox[inboxcur][5]
+ '" from "'
+ inbox[inboxcur][3]
+ '"?',
choices=[
("1", "View message"),
("2", "Mark message as unread"),
("3", "Reply"),
("4", "Add sender to Address Book"),
("5", "Save message as text file"),
("6", "Move to trash"),
],
)
if r == d.DIALOG_OK:
if t == "1": # View
d.set_background_title(
'"'
+ inbox[inboxcur][5]
+ '" from "'
+ inbox[inboxcur][3]
+ '" to "'
+ inbox[inboxcur][1]
+ '"'
)
data = ""
ret = sqlQuery(
"SELECT message FROM inbox WHERE msgid=?",
inbox[inboxcur][0],
)
if ret != []:
for row in ret:
(data,) = row
data = shared.fixPotentiallyInvalidUTF8Data(data)
msg = ""
for i, item in enumerate(data.split("\n")):
msg += fill(item, replace_whitespace=False) + "\n"
d.scrollbox(
unicode(ascii(msg)), 30, 80, exit_label="Continue"
)
sqlExecute(
"UPDATE inbox SET read=1 WHERE msgid=?",
inbox[inboxcur][0],
)
inbox[inboxcur][7] = 1
else:
d.scrollbox(
unicode("Could not fetch message."),
exit_label="Continue",
)
elif t == "2": # Mark unread
sqlExecute(
"UPDATE inbox SET read=0 WHERE msgid=?",
inbox[inboxcur][0],
)
inbox[inboxcur][7] = 0
elif t == "3": # Reply
curses.curs_set(1)
m = inbox[inboxcur]
fromaddr = m[4]
ischan = False
for i, item in enumerate(addresses):
if fromaddr == item[2] and item[3] != 0:
ischan = True
break
if not addresses[i][1]:
d.scrollbox(
unicode(
"Sending address disabled, please either enable it or choose a different address."
),
exit_label="Continue",
)
return
toaddr = m[2]
if ischan:
toaddr = fromaddr
subject = m[5]
if not m[5][:4] == "Re: ":
subject = "Re: " + m[5]
body = ""
ret = sqlQuery(
"SELECT message FROM inbox WHERE msgid=?", m[0]
)
if ret != []:
body = "\n\n------------------------------------------------------\n"
for row in ret:
(body,) = row
sendMessage(fromaddr, toaddr, ischan, subject, body, True)
dialogreset(stdscr)
elif t == "4": # Add to Address Book
global addrbook
addr = inbox[inboxcur][4]
if addr not in [item[1] for i, item in enumerate(addrbook)]:
r, t = d.inputbox('Label for address "' + addr + '"')
if r == d.DIALOG_OK:
label = t
sqlExecute(
"INSERT INTO addressbook VALUES (?,?)",
label,
addr,
)
# Prepend entry
addrbook.reverse()
addrbook.append([label, addr])
addrbook.reverse()
else:
d.scrollbox(
unicode(
"The selected address is already in the Address Book."
),
exit_label="Continue",
)
elif t == "5": # Save message
d.set_background_title(
'Save "' + inbox[inboxcur][5] + '" as text file'
)
r, t = d.inputbox(
"Filename", init=inbox[inboxcur][5] + ".txt"
)
if r == d.DIALOG_OK:
msg = ""
ret = sqlQuery(
"SELECT message FROM inbox WHERE msgid=?",
inbox[inboxcur][0],
)
if ret != []:
for row in ret:
(msg,) = row
fh = open(
t, "a"
) # Open in append mode just in case
fh.write(msg)
fh.close()
else:
d.scrollbox(
unicode("Could not fetch message."),
exit_label="Continue",
)
elif t == "6": # Move to trash
sqlExecute(
"UPDATE inbox SET folder='trash' WHERE msgid=?",
inbox[inboxcur][0],
)
del inbox[inboxcur]
d.scrollbox(
unicode(
"Message moved to trash. There is no interface to view your trash, \nbut the message is still on disk if you are desperate to recover it."
),
exit_label="Continue",
)
elif menutab == 2:
a = ""
if addresses[addrcur][3] != 0: # if current address is a chan
a = addresses[addrcur][2]
sendMessage(addresses[addrcur][2], a)
elif menutab == 3:
d.set_background_title("Sent Messages Dialog Box")
r, t = d.menu(
'Do what with "'
+ sentbox[sentcur][4]
+ '" to "'
+ sentbox[sentcur][0]
+ '"?',
choices=[("1", "View message"), ("2", "Move to trash")],
)
if r == d.DIALOG_OK:
if t == "1": # View
d.set_background_title(
'"'
+ sentbox[sentcur][4]
+ '" from "'
+ sentbox[sentcur][3]
+ '" to "'
+ sentbox[sentcur][1]
+ '"'
)
data = ""
ret = sqlQuery(
"SELECT message FROM sent WHERE subject=? AND ackdata=?",
sentbox[sentcur][4],
sentbox[sentcur][6],
)
if ret != []:
for row in ret:
(data,) = row
data = shared.fixPotentiallyInvalidUTF8Data(data)
msg = ""
for i, item in enumerate(data.split("\n")):
msg += fill(item, replace_whitespace=False) + "\n"
d.scrollbox(
unicode(ascii(msg)), 30, 80, exit_label="Continue"
)
else:
d.scrollbox(
unicode("Could not fetch message."),
exit_label="Continue",
)
elif t == "2": # Move to trash
sqlExecute(
"UPDATE sent SET folder='trash' WHERE subject=? AND ackdata=?",
sentbox[sentcur][4],
sentbox[sentcur][6],
)
del sentbox[sentcur]
d.scrollbox(
unicode(
"Message moved to trash. There is no interface to view your trash, \nbut the message is still on disk if you are desperate to recover it."
),
exit_label="Continue",
)
elif menutab == 4:
d.set_background_title("Your Identities Dialog Box")
r, t = d.menu(
'Do what with "'
+ addresses[addrcur][0]
+ '" : "'
+ addresses[addrcur][2]
+ '"?',
choices=[
("1", "Create new address"),
("2", "Send a message from this address"),
("3", "Rename"),
("4", "Enable"),
("5", "Disable"),
("6", "Delete"),
("7", "Special address behavior"),
],
)
if r == d.DIALOG_OK:
if t == "1": # Create new address
d.set_background_title("Create new address")
d.scrollbox(
unicode(
"Here you may generate as many addresses as you like.\n"
"Indeed, creating and abandoning addresses is encouraged.\n"
"Deterministic addresses have several pros and cons:\n"
"\nPros:\n"
" * You can recreate your addresses on any computer from memory\n"
" * You need not worry about backing up your keys.dat file as long as you \n can remember your passphrase\n"
"Cons:\n"
" * You must remember (or write down) your passphrase in order to recreate \n your keys if they are lost\n"
" * You must also remember the address version and stream numbers\n"
" * If you choose a weak passphrase someone may be able to brute-force it \n and then send and receive messages as you"
),
exit_label="Continue",
)
r, t = d.menu(
"Choose an address generation technique",
choices=[
("1", "Use a random number generator"),
("2", "Use a passphrase"),
],
)
if r == d.DIALOG_OK:
if t == "1":
d.set_background_title("Randomly generate address")
r, t = d.inputbox(
"Label (not shown to anyone except you)"
)
label = ""
if r == d.DIALOG_OK and len(t) > 0:
label = t
r, t = d.menu(
"Choose a stream",
choices=[
("1", "Use the most available stream"),
(
"",
"(Best if this is the first of many addresses you will create)",
),
(
"2",
"Use the same stream as an existing address",
),
(
"",
"(Saves you some bandwidth and processing power)",
),
],
)
if r == d.DIALOG_OK:
if t == "1":
stream = 1
elif t == "2":
addrs = []
for i, item in enumerate(addresses):
addrs.append([str(i), item[2]])
r, t = d.menu(
"Choose an existing address's stream",
choices=addrs,
)
if r == d.DIALOG_OK:
stream = decodeAddress(
addrs[int(t)][1]
)[2]
shorten = False
r, t = d.checklist(
"Miscellaneous options",
choices=[
(
"1",
"Spend time shortening the address",
shorten,
)
],
)
if r == d.DIALOG_OK and "1" in t:
shorten = True
shared.addressGeneratorQueue.put(
(
"createRandomAddress",
4,
stream,
label,
1,
"",
shorten,
)
)
elif t == "2":
d.set_background_title(
"Make deterministic addresses"
)
r, t = d.passwordform(
"Enter passphrase",
[
("Passphrase", 1, 1, "", 2, 1, 64, 128),
(
"Confirm passphrase",
3,
1,
"",
4,
1,
64,
128,
),
],
form_height=4,
insecure=True,
)
if r == d.DIALOG_OK:
if t[0] == t[1]:
passphrase = t[0]
r, t = d.rangebox(
"Number of addresses to generate",
width=48,
min=1,
max=99,
init=8,
)
if r == d.DIALOG_OK:
number = t
stream = 1
shorten = False
r, t = d.checklist(
"Miscellaneous options",
choices=[
(
"1",
"Spend time shortening the address",
shorten,
)
],
)
if r == d.DIALOG_OK and "1" in t:
shorten = True
d.scrollbox(
unicode(
"In addition to your passphrase, be sure to remember the following numbers:\n"
"\n * Address version number: "
+ str(4)
+ "\n"
" * Stream number: "
+ str(stream)
),
exit_label="Continue",
)
shared.addressGeneratorQueue.put(
(
"createDeterministicAddresses",
4,
stream,
"unused deterministic address",
number,
str(passphrase),
shorten,
)
)
else:
d.scrollbox(
unicode("Passphrases do not match"),
exit_label="Continue",
)
elif t == "2": # Send a message
a = ""
if (
addresses[addrcur][3] != 0
): # if current address is a chan
a = addresses[addrcur][2]
sendMessage(addresses[addrcur][2], a)
elif t == "3": # Rename address label
a = addresses[addrcur][2]
label = addresses[addrcur][0]
r, t = d.inputbox("New address label", init=label)
if r == d.DIALOG_OK:
label = t
shared.config.set(a, "label", label)
# Write config
shared.writeKeysFile()
addresses[addrcur][0] = label
elif t == "4": # Enable address
a = addresses[addrcur][2]
shared.config.set(a, "enabled", "true") # Set config
# Write config
shared.writeKeysFile()
# Change color
if shared.safeConfigGetBoolean(a, "chan"):
addresses[addrcur][3] = 9 # orange
elif shared.safeConfigGetBoolean(a, "mailinglist"):
addresses[addrcur][3] = 5 # magenta
else:
addresses[addrcur][3] = 0 # black
addresses[addrcur][1] = True
shared.reloadMyAddressHashes() # Reload address hashes
elif t == "5": # Disable address
a = addresses[addrcur][2]
shared.config.set(a, "enabled", "false") # Set config
addresses[addrcur][3] = 8 # Set color to gray
# Write config
shared.writeKeysFile()
addresses[addrcur][1] = False
shared.reloadMyAddressHashes() # Reload address hashes
elif t == "6": # Delete address
r, t = d.inputbox(
'Type in "I want to delete this address"', width=50
)
if (
r == d.DIALOG_OK
and t == "I want to delete this address"
):
shared.config.remove_section(addresses[addrcur][2])
shared.writeKeysFile()
del addresses[addrcur]
elif t == "7": # Special address behavior
a = addresses[addrcur][2]
d.set_background_title("Special address behavior")
if shared.safeConfigGetBoolean(a, "chan"):
d.scrollbox(
unicode(
"This is a chan address. You cannot use it as a pseudo-mailing list."
),
exit_label="Continue",
)
else:
m = shared.safeConfigGetBoolean(a, "mailinglist")
r, t = d.radiolist(
"Select address behavior",
choices=[
("1", "Behave as a normal address", not m),
(
"2",
"Behave as a pseudo-mailing-list address",
m,
),
],
)
if r == d.DIALOG_OK:
if t == "1" and m == True:
shared.config.set(a, "mailinglist", "false")
if addresses[addrcur][1]:
addresses[addrcur][3] = (
0 # Set color to black
)
else:
addresses[addrcur][3] = (
8 # Set color to gray
)
elif t == "2" and m == False:
try:
mn = shared.config.get(a, "mailinglistname")
except ConfigParser.NoOptionError:
mn = ""
r, t = d.inputbox("Mailing list name", init=mn)
if r == d.DIALOG_OK:
mn = t
shared.config.set(a, "mailinglist", "true")
shared.config.set(a, "mailinglistname", mn)
addresses[addrcur][3] = (
6 # Set color to magenta
)
# Write config
shared.writeKeysFile()
elif menutab == 5:
d.set_background_title("Subscriptions Dialog Box")
r, t = d.menu(
'Do what with subscription to "'
+ subscriptions[subcur][0]
+ '"?',
choices=[
("1", "Add new subscription"),
("2", "Delete this subscription"),
("3", "Enable"),
("4", "Disable"),
],
)
if r == d.DIALOG_OK:
if t == "1":
r, t = d.inputbox("New subscription address")
if r == d.DIALOG_OK:
addr = addBMIfNotPresent(t)
if not shared.isAddressInMySubscriptionsList(addr):
r, t = d.inputbox("New subscription label")
if r == d.DIALOG_OK:
label = t
# Prepend entry
subscriptions.reverse()
subscriptions.append([label, addr, True])
subscriptions.reverse()
sqlExecute(
"INSERT INTO subscriptions VALUES (?,?,?)",
label,
address,
True,
)
shared.reloadBroadcastSendersForWhichImWatching()
elif t == "2":
r, t = d.inpuxbox(
'Type in "I want to delete this subscription"'
)
if (
r == d.DIALOG_OK
and t == "I want to delete this subscription"
):
sqlExecute(
"DELETE FROM subscriptions WHERE label=? AND address=?",
subscriptions[subcur][0],
subscriptions[subcur][1],
)
shared.reloadBroadcastSendersForWhichImWatching()
del subscriptions[subcur]
elif t == "3":
sqlExecute(
"UPDATE subscriptions SET enabled=1 WHERE label=? AND address=?",
subscriptions[subcur][0],
subscriptions[subcur][1],
)
shared.reloadBroadcastSendersForWhichImWatching()
subscriptions[subcur][2] = True
elif t == "4":
sqlExecute(
"UPDATE subscriptions SET enabled=0 WHERE label=? AND address=?",
subscriptions[subcur][0],
subscriptions[subcur][1],
)
shared.reloadBroadcastSendersForWhichImWatching()
subscriptions[subcur][2] = False
elif menutab == 6:
d.set_background_title("Address Book Dialog Box")
r, t = d.menu(
'Do what with "'
+ addrbook[abookcur][0]
+ '" : "'
+ addrbook[abookcur][1]
+ '"',
choices=[
("1", "Send a message to this address"),
("2", "Subscribe to this address"),
("3", "Add new address to Address Book"),
("4", "Delete this address"),
],
)
if r == d.DIALOG_OK:
if t == "1":
sendMessage(recv=addrbook[abookcur][1])
elif t == "2":
r, t = d.inputbox("New subscription label")
if r == d.DIALOG_OK:
label = t
# Prepend entry
subscriptions.reverse()
subscriptions.append([label, addr, True])
subscriptions.reverse()
sqlExecute(
"INSERT INTO subscriptions VALUES (?,?,?)",
label,
address,
True,
)
shared.reloadBroadcastSendersForWhichImWatching()
elif t == "3":
r, t = d.inputbox("Input new address")
if r == d.DIALOG_OK:
addr = t
if addr not in [
item[1] for i, item in enumerate(addrbook)
]:
r, t = d.inputbox(
'Label for address "' + addr + '"'
)
if r == d.DIALOG_OK:
sqlExecute(
"INSERT INTO addressbook VALUES (?,?)",
t,
addr,
)
# Prepend entry
addrbook.reverse()
addrbook.append([t, addr])
addrbook.reverse()
else:
d.scrollbox(
unicode(
"The selected address is already in the Address Book."
),
exit_label="Continue",
)
elif t == "4":
r, t = d.inputbox(
'Type in "I want to delete this Address Book entry"'
)
if (
r == d.DIALOG_OK
and t == "I want to delete this Address Book entry"
):
sqlExecute(
"DELETE FROM addressbook WHERE label=? AND address=?",
addrbook[abookcur][0],
addrbook[abookcur][1],
)
del addrbook[abookcur]
elif menutab == 7:
d.set_background_title("Blacklist Dialog Box")
r, t = d.menu(
'Do what with "'
+ blacklist[blackcur][0]
+ '" : "'
+ blacklist[blackcur][1]
+ '"?',
choices=[("1", "Delete"), ("2", "Enable"), ("3", "Disable")],
)
if r == d.DIALOG_OK:
if t == "1":
r, t = d.inputbox(
'Type in "I want to delete this Blacklist entry"'
)
if (
r == d.DIALOG_OK
and t == "I want to delete this Blacklist entry"
):
sqlExecute(
"DELETE FROM blacklist WHERE label=? AND address=?",
blacklist[blackcur][0],
blacklist[blackcur][1],
)
del blacklist[blackcur]
elif t == "2":
sqlExecute(
"UPDATE blacklist SET enabled=1 WHERE label=? AND address=?",
blacklist[blackcur][0],
blacklist[blackcur][1],
)
blacklist[blackcur][2] = True
elif t == "3":
sqlExecute(
"UPDATE blacklist SET enabled=0 WHERE label=? AND address=?",
blacklist[blackcur][0],
blacklist[blackcur][1],
)
blacklist[blackcur][2] = False
dialogreset(stdscr)
else:
if c == curses.KEY_UP:
if menutab == 1 and inboxcur > 0:
inboxcur -= 1
if (menutab == 2 or menutab == 4) and addrcur > 0:
addrcur -= 1
if menutab == 3 and sentcur > 0:
sentcur -= 1
if menutab == 5 and subcur > 0:
subcur -= 1
if menutab == 6 and abookcur > 0:
abookcur -= 1
if menutab == 7 and blackcur > 0:
blackcur -= 1
elif c == curses.KEY_DOWN:
if menutab == 1 and inboxcur < len(inbox) - 1:
inboxcur += 1
if (menutab == 2 or menutab == 4) and addrcur < len(addresses) - 1:
addrcur += 1
if menutab == 3 and sentcur < len(sentbox) - 1:
sentcur += 1
if menutab == 5 and subcur < len(subscriptions) - 1:
subcur += 1
if menutab == 6 and abookcur < len(addrbook) - 1:
abookcur += 1
if menutab == 7 and blackcur < len(blacklist) - 1:
blackcur += 1
elif c == curses.KEY_HOME:
if menutab == 1:
inboxcur = 0
if menutab == 2 or menutab == 4:
addrcur = 0
if menutab == 3:
sentcur = 0
if menutab == 5:
subcur = 0
if menutab == 6:
abookcur = 0
if menutab == 7:
blackcur = 0
elif c == curses.KEY_END:
if menutab == 1:
inboxcur = len(inbox) - 1
if menutab == 2 or menutab == 4:
addrcur = len(addresses) - 1
if menutab == 3:
sentcur = len(sentbox) - 1
if menutab == 5:
subcur = len(subscriptions) - 1
if menutab == 6:
abookcur = len(addrbook) - 1
if menutab == 7:
blackcur = len(blackcur) - 1
redraw(stdscr)
|
https://github.com/Bitmessage/PyBitmessage/issues/711
|
Loading existing config files from /home/aaron/.config/PyBitmessage/
An Exception occurred within isOurOperatingSystemLimitedToHavingVeryFewHalfOpenConnections: invalid version number '#1 SMP Debian 3.14.2-1 (2014-04-28)'
Running with curses
Loading inbox messages...
Loading sent messages...
Loading address book...
Traceback (most recent call last):
File "./src/bitmessagemain.py", line 272, in <module>
mainprogram.start()
File "./src/bitmessagemain.py", line 243, in start
bitmessagecurses.runwrapper()
File "/home/aaron/src/bitmessage/src/bitmessagecurses/__init__.py", line 984, in runwrapper
curses.wrapper(run)
File "/usr/lib/python2.7/curses/wrapper.py", line 43, in wrapper
return func(stdscr, *args, **kwds)
File "/home/aaron/src/bitmessage/src/bitmessagecurses/__init__.py", line 1030, in run
handlech(stdscr.getch(), stdscr)
File "/home/aaron/src/bitmessage/src/bitmessagecurses/__init__.py", line 383, in handlech
d.set_background_title("Your Identities Dialog Box")
AttributeError: Dialog instance has no attribute 'set_background_title'
|
AttributeError
|
def sendMessage(sender="", recv="", broadcast=None, subject="", body="", reply=False):
if sender == "":
return
d = Dialog(dialog="dialog")
set_background_title(d, "Send a message")
if recv == "":
r, t = d.inputbox(
"Recipient address (Cancel to load from the Address Book or leave blank to broadcast)",
10,
60,
)
if r != d.DIALOG_OK:
global menutab
menutab = 6
return
recv = t
if broadcast == None and sender != recv:
r, t = d.radiolist(
"How to send the message?",
choices=[
("1", "Send to one or more specific people", 1),
("2", "Broadcast to everyone who is subscribed to your address", 0),
],
)
if r != d.DIALOG_OK:
return
broadcast = False
if t == "2": # Broadcast
broadcast = True
if subject == "" or reply:
r, t = d.inputbox("Message subject", width=60, init=subject)
if r != d.DIALOG_OK:
return
subject = t
if body == "" or reply:
r, t = d.inputbox("Message body", 10, 80, init=body)
if r != d.DIALOG_OK:
return
body = t
body = body.replace("\\n", "\n").replace("\\t", "\t")
if not broadcast:
recvlist = []
for i, item in enumerate(recv.replace(",", ";").split(";")):
recvlist.append(item.strip())
list(set(recvlist)) # Remove exact duplicates
for addr in recvlist:
if addr != "":
status, version, stream, ripe = decodeAddress(addr)
if status != "success":
set_background_title(d, "Recipient address error")
err = "Could not decode" + addr + " : " + status + "\n\n"
if status == "missingbm":
err += 'Bitmessage addresses should start with "BM-".'
elif status == "checksumfailed":
err += "The address was not typed or copied correctly."
elif status == "invalidcharacters":
err += "The address contains invalid characters."
elif status == "versiontoohigh":
err += "The address version is too high. Either you need to upgrade your Bitmessage software or your acquaintance is doing something clever."
elif status == "ripetooshort":
err += "Some data encoded in the address is too short. There might be something wrong with the software of your acquaintance."
elif status == "ripetoolong":
err += "Some data encoded in the address is too long. There might be something wrong with the software of your acquaintance."
elif status == "varintmalformed":
err += "Some data encoded in the address is malformed. There might be something wrong with the software of your acquaintance."
else:
err += "It is unknown what is wrong with the address."
scrollbox(d, unicode(err))
else:
addr = addBMIfNotPresent(addr)
if version > 4 or version <= 1:
set_background_title(d, "Recipient address error")
scrollbox(
d,
unicode(
"Could not understand version number "
+ version
+ "of address"
+ addr
+ "."
),
)
continue
if stream > 1 or stream == 0:
set_background_title(d, "Recipient address error")
scrollbox(
d,
unicode(
"Bitmessage currently only supports stream numbers of 1, unlike as requested for address "
+ addr
+ "."
),
)
continue
if len(shared.connectedHostsList) == 0:
set_background_title(d, "Not connected warning")
scrollbox(
d,
unicode(
"Because you are not currently connected to the network, "
),
)
ackdata = OpenSSL.rand(32)
sqlExecute(
"INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
"",
addr,
ripe,
sender,
subject,
body,
ackdata,
int(time.time()), # sentTime (this will never change)
int(time.time()), # lastActionTime
0, # sleepTill time. This will get set when the POW gets done.
"msgqueued",
0, # retryNumber
"sent",
2, # encodingType
shared.config.getint("bitmessagesettings", "ttl"),
)
shared.workerQueue.put(("sendmessage", addr))
else: # Broadcast
if recv == "":
set_background_title(d, "Empty sender error")
scrollbox(
d, unicode("You must specify an address to send the message from.")
)
else:
ackdata = OpenSSL.rand(32)
recv = BROADCAST_STR
ripe = ""
sqlExecute(
"INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
"",
recv,
ripe,
sender,
subject,
body,
ackdata,
int(time.time()), # sentTime (this will never change)
int(time.time()), # lastActionTime
0, # sleepTill time. This will get set when the POW gets done.
"broadcastqueued",
0, # retryNumber
"sent", # folder
2, # encodingType
shared.config.getint("bitmessagesettings", "ttl"),
)
shared.workerQueue.put(("sendbroadcast", ""))
|
def sendMessage(sender="", recv="", broadcast=None, subject="", body="", reply=False):
if sender == "":
return
d = Dialog(dialog="dialog")
d.set_background_title("Send a message")
if recv == "":
r, t = d.inputbox(
"Recipient address (Cancel to load from the Address Book or leave blank to broadcast)",
10,
60,
)
if r != d.DIALOG_OK:
global menutab
menutab = 6
return
recv = t
if broadcast == None and sender != recv:
r, t = d.radiolist(
"How to send the message?",
choices=[
("1", "Send to one or more specific people", True),
("2", "Broadcast to everyone who is subscribed to your address", False),
],
)
if r != d.DIALOG_OK:
return
broadcast = False
if t == "2": # Broadcast
broadcast = True
if subject == "" or reply:
r, t = d.inputbox("Message subject", width=60, init=subject)
if r != d.DIALOG_OK:
return
subject = t
if body == "" or reply:
r, t = d.inputbox("Message body", 10, 80, init=body)
if r != d.DIALOG_OK:
return
body = t
body = body.replace("\\n", "\n").replace("\\t", "\t")
if not broadcast:
recvlist = []
for i, item in enumerate(recv.replace(",", ";").split(";")):
recvlist.append(item.strip())
list(set(recvlist)) # Remove exact duplicates
for addr in recvlist:
if addr != "":
status, version, stream, ripe = decodeAddress(addr)
if status != "success":
d.set_background_title("Recipient address error")
err = "Could not decode" + addr + " : " + status + "\n\n"
if status == "missingbm":
err += 'Bitmessage addresses should start with "BM-".'
elif status == "checksumfailed":
err += "The address was not typed or copied correctly."
elif status == "invalidcharacters":
err += "The address contains invalid characters."
elif status == "versiontoohigh":
err += "The address version is too high. Either you need to upgrade your Bitmessage software or your acquaintance is doing something clever."
elif status == "ripetooshort":
err += "Some data encoded in the address is too short. There might be something wrong with the software of your acquaintance."
elif status == "ripetoolong":
err += "Some data encoded in the address is too long. There might be something wrong with the software of your acquaintance."
elif status == "varintmalformed":
err += "Some data encoded in the address is malformed. There might be something wrong with the software of your acquaintance."
else:
err += "It is unknown what is wrong with the address."
d.scrollbox(unicode(err), exit_label="Continue")
else:
addr = addBMIfNotPresent(addr)
if version > 4 or version <= 1:
d.set_background_title("Recipient address error")
d.scrollbox(
unicode(
"Could not understand version number "
+ version
+ "of address"
+ addr
+ "."
),
exit_label="Continue",
)
continue
if stream > 1 or stream == 0:
d.set_background_title("Recipient address error")
d.scrollbox(
unicode(
"Bitmessage currently only supports stream numbers of 1, unlike as requested for address "
+ addr
+ "."
),
exit_label="Continue",
)
continue
if len(shared.connectedHostsList) == 0:
d.set_background_title("Not connected warning")
d.scrollbox(
unicode(
"Because you are not currently connected to the network, "
),
exit_label="Continue",
)
ackdata = OpenSSL.rand(32)
sqlExecute(
"INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
"",
addr,
ripe,
sender,
subject,
body,
ackdata,
int(time.time()), # sentTime (this will never change)
int(time.time()), # lastActionTime
0, # sleepTill time. This will get set when the POW gets done.
"msgqueued",
0, # retryNumber
"sent",
2, # encodingType
shared.config.getint("bitmessagesettings", "ttl"),
)
shared.workerQueue.put(("sendmessage", addr))
else: # Broadcast
if recv == "":
d.set_background_title("Empty sender error")
d.scrollbox(
unicode("You must specify an address to send the message from."),
exit_label="Continue",
)
else:
ackdata = OpenSSL.rand(32)
recv = BROADCAST_STR
ripe = ""
sqlExecute(
"INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
"",
recv,
ripe,
sender,
subject,
body,
ackdata,
int(time.time()), # sentTime (this will never change)
int(time.time()), # lastActionTime
0, # sleepTill time. This will get set when the POW gets done.
"broadcastqueued",
0, # retryNumber
"sent", # folder
2, # encodingType
shared.config.getint("bitmessagesettings", "ttl"),
)
shared.workerQueue.put(("sendbroadcast", ""))
|
https://github.com/Bitmessage/PyBitmessage/issues/711
|
Loading existing config files from /home/aaron/.config/PyBitmessage/
An Exception occurred within isOurOperatingSystemLimitedToHavingVeryFewHalfOpenConnections: invalid version number '#1 SMP Debian 3.14.2-1 (2014-04-28)'
Running with curses
Loading inbox messages...
Loading sent messages...
Loading address book...
Traceback (most recent call last):
File "./src/bitmessagemain.py", line 272, in <module>
mainprogram.start()
File "./src/bitmessagemain.py", line 243, in start
bitmessagecurses.runwrapper()
File "/home/aaron/src/bitmessage/src/bitmessagecurses/__init__.py", line 984, in runwrapper
curses.wrapper(run)
File "/usr/lib/python2.7/curses/wrapper.py", line 43, in wrapper
return func(stdscr, *args, **kwds)
File "/home/aaron/src/bitmessage/src/bitmessagecurses/__init__.py", line 1030, in run
handlech(stdscr.getch(), stdscr)
File "/home/aaron/src/bitmessage/src/bitmessagecurses/__init__.py", line 383, in handlech
d.set_background_title("Your Identities Dialog Box")
AttributeError: Dialog instance has no attribute 'set_background_title'
|
AttributeError
|
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "Bitmessage", None))
self.inboxSearchLineEdit.setPlaceholderText(
_translate("MainWindow", "Search", None)
)
self.inboxSearchOptionCB.setItemText(0, _translate("MainWindow", "All", None))
self.inboxSearchOptionCB.setItemText(1, _translate("MainWindow", "To", None))
self.inboxSearchOptionCB.setItemText(2, _translate("MainWindow", "From", None))
self.inboxSearchOptionCB.setItemText(3, _translate("MainWindow", "Subject", None))
self.inboxSearchOptionCB.setItemText(4, _translate("MainWindow", "Message", None))
self.tableWidgetInbox.setSortingEnabled(True)
item = self.tableWidgetInbox.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "To", None))
item = self.tableWidgetInbox.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "From", None))
item = self.tableWidgetInbox.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "Subject", None))
item = self.tableWidgetInbox.horizontalHeaderItem(3)
item.setText(_translate("MainWindow", "Received", None))
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.inbox), _translate("MainWindow", "Inbox", None)
)
self.pushButtonLoadFromAddressBook.setText(
_translate("MainWindow", "Load from Address book", None)
)
self.pushButtonFetchNamecoinID.setText(
_translate("MainWindow", "Fetch Namecoin ID", None)
)
self.label_4.setText(_translate("MainWindow", "Message:", None))
self.label_3.setText(_translate("MainWindow", "Subject:", None))
self.radioButtonSpecific.setText(
_translate("MainWindow", "Send to one or more specific people", None)
)
self.textEditMessage.setHtml(
_translate(
"MainWindow",
'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN" "http://www.w3.org/TR/REC-html40/strict.dtd">\n'
'<html><head><meta name="qrichtext" content="1" /><style type="text/css">\n'
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
'<p style="-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><br /></p></body></html>',
None,
)
)
self.label.setText(_translate("MainWindow", "To:", None))
self.label_2.setText(_translate("MainWindow", "From:", None))
self.radioButtonBroadcast.setText(
_translate(
"MainWindow",
"Broadcast to everyone who is subscribed to your address",
None,
)
)
self.pushButtonSend.setText(_translate("MainWindow", "Send", None))
self.labelSendBroadcastWarning.setText(
_translate(
"MainWindow",
"Be aware that broadcasts are only encrypted with your address. Anyone who knows your address can read them.",
None,
)
)
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.send), _translate("MainWindow", "Send", None)
)
self.sentSearchLineEdit.setPlaceholderText(_translate("MainWindow", "Search", None))
self.sentSearchOptionCB.setItemText(0, _translate("MainWindow", "All", None))
self.sentSearchOptionCB.setItemText(1, _translate("MainWindow", "To", None))
self.sentSearchOptionCB.setItemText(2, _translate("MainWindow", "From", None))
self.sentSearchOptionCB.setItemText(3, _translate("MainWindow", "Subject", None))
self.sentSearchOptionCB.setItemText(4, _translate("MainWindow", "Message", None))
self.tableWidgetSent.setSortingEnabled(True)
item = self.tableWidgetSent.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "To", None))
item = self.tableWidgetSent.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "From", None))
item = self.tableWidgetSent.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "Subject", None))
item = self.tableWidgetSent.horizontalHeaderItem(3)
item.setText(_translate("MainWindow", "Status", None))
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.sent), _translate("MainWindow", "Sent", None)
)
self.pushButtonNewAddress.setText(_translate("MainWindow", "New", None))
self.tableWidgetYourIdentities.setSortingEnabled(True)
item = self.tableWidgetYourIdentities.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Label (not shown to anyone)", None))
item = self.tableWidgetYourIdentities.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "Address", None))
item = self.tableWidgetYourIdentities.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "Stream", None))
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.youridentities),
_translate("MainWindow", "Your Identities", None),
)
self.label_5.setText(
_translate(
"MainWindow",
"Here you can subscribe to 'broadcast messages' that are sent by other users. Messages will appear in your Inbox. Addresses here override those on the Blacklist tab.",
None,
)
)
self.pushButtonAddSubscription.setText(
_translate("MainWindow", "Add new Subscription", None)
)
self.tableWidgetSubscriptions.setSortingEnabled(True)
item = self.tableWidgetSubscriptions.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Label", None))
item = self.tableWidgetSubscriptions.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "Address", None))
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.subscriptions),
_translate("MainWindow", "Subscriptions", None),
)
self.label_6.setText(
_translate(
"MainWindow",
"The Address book is useful for adding names or labels to other people's Bitmessage addresses so that you can recognize them more easily in your inbox. You can add entries here using the 'Add' button, or from your inbox by right-clicking on a message.",
None,
)
)
self.pushButtonAddAddressBook.setText(
_translate("MainWindow", "Add new entry", None)
)
self.tableWidgetAddressBook.setSortingEnabled(True)
item = self.tableWidgetAddressBook.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Name or Label", None))
item = self.tableWidgetAddressBook.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "Address", None))
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.addressbook),
_translate("MainWindow", "Address Book", None),
)
self.radioButtonBlacklist.setText(
_translate(
"MainWindow",
"Use a Blacklist (Allow all incoming messages except those on the Blacklist)",
None,
)
)
self.radioButtonWhitelist.setText(
_translate(
"MainWindow",
"Use a Whitelist (Block all incoming messages except those on the Whitelist)",
None,
)
)
self.pushButtonAddBlacklist.setText(_translate("MainWindow", "Add new entry", None))
self.tableWidgetBlacklist.setSortingEnabled(True)
item = self.tableWidgetBlacklist.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Name or Label", None))
item = self.tableWidgetBlacklist.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "Address", None))
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.blackwhitelist),
_translate("MainWindow", "Blacklist", None),
)
item = self.tableWidgetConnectionCount.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Stream #", None))
item = self.tableWidgetConnectionCount.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "Connections", None))
self.labelTotalConnections.setText(
_translate("MainWindow", "Total connections:", None)
)
self.labelStartupTime.setText(_translate("MainWindow", "Since startup:", None))
self.labelMessageCount.setText(
_translate("MainWindow", "Processed 0 person-to-person messages.", None)
)
self.labelPubkeyCount.setText(
_translate("MainWindow", "Processed 0 public keys.", None)
)
self.labelBroadcastCount.setText(
_translate("MainWindow", "Processed 0 broadcasts.", None)
)
self.labelLookupsPerSecond.setText(
_translate("MainWindow", "Inventory lookups per second: 0", None)
)
self.labelBytesRecvCount.setText(_translate("MainWindow", "Down: 0 KB/s", None))
self.labelBytesSentCount.setText(_translate("MainWindow", "Up: 0 KB/s", None))
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.networkstatus),
_translate("MainWindow", "Network Status", None),
)
self.menuFile.setTitle(_translate("MainWindow", "File", None))
self.menuSettings.setTitle(_translate("MainWindow", "Settings", None))
self.menuHelp.setTitle(_translate("MainWindow", "Help", None))
self.actionImport_keys.setText(_translate("MainWindow", "Import keys", None))
self.actionManageKeys.setText(_translate("MainWindow", "Manage keys", None))
self.actionExit.setText(_translate("MainWindow", "Quit", None))
self.actionExit.setShortcut(_translate("MainWindow", "Ctrl+Q", None))
self.actionHelp.setText(_translate("MainWindow", "Help", None))
self.actionHelp.setShortcut(_translate("MainWindow", "F1", None))
self.actionAbout.setText(_translate("MainWindow", "About", None))
self.actionSettings.setText(_translate("MainWindow", "Settings", None))
self.actionRegenerateDeterministicAddresses.setText(
_translate("MainWindow", "Regenerate deterministic addresses", None)
)
self.actionDeleteAllTrashedMessages.setText(
_translate("MainWindow", "Delete all trashed messages", None)
)
self.actionJoinChan.setText(_translate("MainWindow", "Join / Create chan", None))
|
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "Bitmessage", None))
self.inboxSearchLineEdit.setPlaceholderText(
_translate("MainWindow", "Search", None)
)
self.inboxSearchOptionCB.setItemText(0, _translate("MainWindow", "All", None))
self.inboxSearchOptionCB.setItemText(1, _translate("MainWindow", "To", None))
self.inboxSearchOptionCB.setItemText(2, _translate("MainWindow", "From", None))
self.inboxSearchOptionCB.setItemText(3, _translate("MainWindow", "Subject", None))
self.inboxSearchOptionCB.setItemText(4, _translate("MainWindow", "Message", None))
self.tableWidgetInbox.setSortingEnabled(True)
item = self.tableWidgetInbox.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "To", None))
item = self.tableWidgetInbox.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "From", None))
item = self.tableWidgetInbox.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "Subject", None))
item = self.tableWidgetInbox.horizontalHeaderItem(3)
item.setText(_translate("MainWindow", "Received", None))
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.inbox), _translate("MainWindow", "Inbox", None)
)
self.pushButtonLoadFromAddressBook.setText(
_translate("MainWindow", "Load from Address book", None)
)
self.pushButtonFetchNamecoinID.setText(
_translate("MainWindow", "Fetch Namecoin ID", None)
)
self.label_4.setText(_translate("MainWindow", "Message:", None))
self.label_3.setText(_translate("MainWindow", "Subject:", None))
self.radioButtonSpecific.setText(
_translate("MainWindow", "Send to one or more specific people", None)
)
self.textEditMessage.setHtml(
_translate(
"MainWindow",
'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN" "http://www.w3.org/TR/REC-html40/strict.dtd">\n'
'<html><head><meta name="qrichtext" content="1" /><style type="text/css">\n'
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
'<p style="-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><br /></p></body></html>',
None,
)
)
self.label.setText(_translate("MainWindow", "To:", None))
self.label_2.setText(_translate("MainWindow", "From:", None))
self.radioButtonBroadcast.setText(
_translate(
"MainWindow",
"Broadcast to everyone who is subscribed to your address",
None,
)
)
self.pushButtonSend.setText(_translate("MainWindow", "Send", None))
self.labelSendBroadcastWarning.setText(
_translate(
"MainWindow",
"Be aware that broadcasts are only encrypted with your address. Anyone who knows your address can read them.",
None,
)
)
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.send), _translate("MainWindow", "Send", None)
)
self.sentSearchLineEdit.setPlaceholderText(_translate("MainWindow", "Search", None))
self.sentSearchOptionCB.setItemText(0, _translate("MainWindow", "All", None))
self.sentSearchOptionCB.setItemText(1, _translate("MainWindow", "To", None))
self.sentSearchOptionCB.setItemText(2, _translate("MainWindow", "From", None))
self.sentSearchOptionCB.setItemText(3, _translate("MainWindow", "Subject", None))
self.sentSearchOptionCB.setItemText(4, _translate("MainWindow", "Message", None))
self.tableWidgetSent.setSortingEnabled(True)
item = self.tableWidgetSent.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "To", None))
item = self.tableWidgetSent.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "From", None))
item = self.tableWidgetSent.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "Subject", None))
item = self.tableWidgetSent.horizontalHeaderItem(3)
item.setText(_translate("MainWindow", "Status", None))
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.sent), _translate("MainWindow", "Sent", None)
)
self.pushButtonNewAddress.setText(_translate("MainWindow", "New", None))
self.tableWidgetYourIdentities.setSortingEnabled(True)
item = self.tableWidgetYourIdentities.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Label (not shown to anyone)", None))
item = self.tableWidgetYourIdentities.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "Address", None))
item = self.tableWidgetYourIdentities.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "Stream", None))
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.youridentities),
_translate("MainWindow", "Your Identities", None),
)
self.label_5.setText(
_translate(
"MainWindow",
"Here you can subscribe to 'broadcast messages' that are sent by other users. Messages will appear in your Inbox. Addresses here override those on the Blacklist tab.",
None,
)
)
self.pushButtonAddSubscription.setText(
_translate("MainWindow", "Add new Subscription", None)
)
self.tableWidgetSubscriptions.setSortingEnabled(True)
item = self.tableWidgetSubscriptions.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Label", None))
item = self.tableWidgetSubscriptions.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "Address", None))
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.subscriptions),
_translate("MainWindow", "Subscriptions", None),
)
self.label_6.setText(
_translate(
"MainWindow",
"The Address book is useful for adding names or labels to other people's Bitmessage addresses so that you can recognize them more easily in your inbox. You can add entries here using the 'Add' button, or from your inbox by right-clicking on a message.",
None,
)
)
self.pushButtonAddAddressBook.setText(
_translate("MainWindow", "Add new entry", None)
)
self.tableWidgetAddressBook.setSortingEnabled(True)
item = self.tableWidgetAddressBook.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Name or Label", None))
item = self.tableWidgetAddressBook.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "Address", None))
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.addressbook),
_translate("MainWindow", "Address Book", None),
)
self.radioButtonBlacklist.setText(
_translate(
"MainWindow",
"Use a Blacklist (Allow all incoming messages except those on the Blacklist)",
None,
)
)
self.radioButtonWhitelist.setText(
_translate(
"MainWindow",
"Use a Whitelist (Block all incoming messages except those on the Whitelist)",
None,
)
)
self.pushButtonAddBlacklist.setText(_translate("MainWindow", "Add new entry", None))
self.tableWidgetBlacklist.setSortingEnabled(True)
item = self.tableWidgetBlacklist.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Name or Label", None))
item = self.tableWidgetBlacklist.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "Address", None))
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.blackwhitelist),
_translate("MainWindow", "Blacklist", None),
)
item = self.tableWidgetConnectionCount.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Stream #", None))
item = self.tableWidgetConnectionCount.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "Connections", None))
self.labelTotalConnections.setText(
_translate("MainWindow", "Total connections: 0", None)
)
self.labelStartupTime.setText(
_translate("MainWindow", "Since startup at asdf:", None)
)
self.labelMessageCount.setText(
_translate("MainWindow", "Processed 0 person-to-person message.", None)
)
self.labelPubkeyCount.setText(
_translate("MainWindow", "Processed 0 public key.", None)
)
self.labelBroadcastCount.setText(
_translate("MainWindow", "Processed 0 broadcast.", None)
)
self.labelLookupsPerSecond.setText(
_translate("MainWindow", "Inventory lookups per second: 0", None)
)
self.labelBytesRecvCount.setText(_translate("MainWindow", "Down: 0 KB/s", None))
self.labelBytesSentCount.setText(_translate("MainWindow", "Up: 0 KB/s", None))
self.tabWidget.setTabText(
self.tabWidget.indexOf(self.networkstatus),
_translate("MainWindow", "Network Status", None),
)
self.menuFile.setTitle(_translate("MainWindow", "File", None))
self.menuSettings.setTitle(_translate("MainWindow", "Settings", None))
self.menuHelp.setTitle(_translate("MainWindow", "Help", None))
self.actionImport_keys.setText(_translate("MainWindow", "Import keys", None))
self.actionManageKeys.setText(_translate("MainWindow", "Manage keys", None))
self.actionExit.setText(_translate("MainWindow", "Quit", None))
self.actionExit.setShortcut(_translate("MainWindow", "Ctrl+Q", None))
self.actionHelp.setText(_translate("MainWindow", "Help", None))
self.actionHelp.setShortcut(_translate("MainWindow", "F1", None))
self.actionAbout.setText(_translate("MainWindow", "About", None))
self.actionSettings.setText(_translate("MainWindow", "Settings", None))
self.actionRegenerateDeterministicAddresses.setText(
_translate("MainWindow", "Regenerate deterministic addresses", None)
)
self.actionDeleteAllTrashedMessages.setText(
_translate("MainWindow", "Delete all trashed messages", None)
)
self.actionJoinChan.setText(_translate("MainWindow", "Join / Create chan", None))
|
https://github.com/Bitmessage/PyBitmessage/issues/761
|
Exception in thread Thread-1:
Traceback (most recent call last):
File "C:\Python27\lib\threading.py", line 810, in __bootstrap_inner
self.run()
File "C:\AdminRoot\TBM\PyBitmessage\src\class_addressGenerator.py", line 101,in run
print 'Address generator calculated', numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix, 'addresses at', numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix / (time.time() - startTime), 'addresses per second before finding one with the correct ripe-prefix.'
ZeroDivisionError: float division by zero
|
ZeroDivisionError
|
def send(
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection(request.url, proxies)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(
request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
)
chunked = not (request.body is None or "Content-Length" in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError as e:
# this may raise a string formatting error.
err = (
"Invalid timeout {0}. Pass a (connect, read) "
"timeout tuple, or a single float to set "
"both timeouts to the same value".format(timeout)
)
raise ValueError(err)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
if not chunked:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout,
)
# Send the request.
else:
if hasattr(conn, "proxy_pool"):
conn = conn.proxy_pool
low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
try:
low_conn.putrequest(request.method, url, skip_accept_encoding=True)
for header, value in request.headers.items():
low_conn.putheader(header, value)
low_conn.endheaders()
for i in request.body:
low_conn.send(hex(len(i))[2:].encode("utf-8"))
low_conn.send(b"\r\n")
low_conn.send(i)
low_conn.send(b"\r\n")
low_conn.send(b"0\r\n\r\n")
# Receive the response from the server
try:
# For Python 2.7+ versions, use buffering of HTTP
# responses
r = low_conn.getresponse(buffering=True)
except TypeError:
# For compatibility with Python 2.6 versions and back
r = low_conn.getresponse()
resp = HTTPResponse.from_httplib(
r,
pool=conn,
connection=low_conn,
preload_content=False,
decode_content=False,
)
except:
# If we hit any problems here, clean up the connection.
# Then, reraise so that we can handle the actual exception.
low_conn.close()
raise
except (ProtocolError, socket.error) as err:
raise ConnectionError(err, request=request)
except MaxRetryError as e:
if isinstance(e.reason, ConnectTimeoutError):
# TODO: Remove this in 3.0.0: see #2811
if not isinstance(e.reason, NewConnectionError):
raise ConnectTimeout(e, request=request)
if isinstance(e.reason, ResponseError):
raise RetryError(e, request=request)
if isinstance(e.reason, _ProxyError):
raise ProxyError(e, request=request)
if isinstance(e.reason, _SSLError):
# This branch is for urllib3 v1.22 and later.
raise SSLError(e, request=request)
raise ConnectionError(e, request=request)
except ClosedPoolError as e:
raise ConnectionError(e, request=request)
except _ProxyError as e:
raise ProxyError(e)
except (_SSLError, _HTTPError) as e:
if isinstance(e, _SSLError):
# This branch is for urllib3 versions earlier than v1.22
raise SSLError(e, request=request)
elif isinstance(e, ReadTimeoutError):
raise ReadTimeout(e, request=request)
else:
raise
return self.build_response(request, resp)
|
def send(
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
conn = self.get_connection(request.url, proxies)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(
request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
)
chunked = not (request.body is None or "Content-Length" in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError as e:
# this may raise a string formatting error.
err = (
"Invalid timeout {0}. Pass a (connect, read) "
"timeout tuple, or a single float to set "
"both timeouts to the same value".format(timeout)
)
raise ValueError(err)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
if not chunked:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout,
)
# Send the request.
else:
if hasattr(conn, "proxy_pool"):
conn = conn.proxy_pool
low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
try:
low_conn.putrequest(request.method, url, skip_accept_encoding=True)
for header, value in request.headers.items():
low_conn.putheader(header, value)
low_conn.endheaders()
for i in request.body:
low_conn.send(hex(len(i))[2:].encode("utf-8"))
low_conn.send(b"\r\n")
low_conn.send(i)
low_conn.send(b"\r\n")
low_conn.send(b"0\r\n\r\n")
# Receive the response from the server
try:
# For Python 2.7+ versions, use buffering of HTTP
# responses
r = low_conn.getresponse(buffering=True)
except TypeError:
# For compatibility with Python 2.6 versions and back
r = low_conn.getresponse()
resp = HTTPResponse.from_httplib(
r,
pool=conn,
connection=low_conn,
preload_content=False,
decode_content=False,
)
except:
# If we hit any problems here, clean up the connection.
# Then, reraise so that we can handle the actual exception.
low_conn.close()
raise
except (ProtocolError, socket.error) as err:
raise ConnectionError(err, request=request)
except MaxRetryError as e:
if isinstance(e.reason, ConnectTimeoutError):
# TODO: Remove this in 3.0.0: see #2811
if not isinstance(e.reason, NewConnectionError):
raise ConnectTimeout(e, request=request)
if isinstance(e.reason, ResponseError):
raise RetryError(e, request=request)
if isinstance(e.reason, _ProxyError):
raise ProxyError(e, request=request)
if isinstance(e.reason, _SSLError):
# This branch is for urllib3 v1.22 and later.
raise SSLError(e, request=request)
raise ConnectionError(e, request=request)
except ClosedPoolError as e:
raise ConnectionError(e, request=request)
except _ProxyError as e:
raise ProxyError(e)
except (_SSLError, _HTTPError) as e:
if isinstance(e, _SSLError):
# This branch is for urllib3 versions earlier than v1.22
raise SSLError(e, request=request)
elif isinstance(e, ReadTimeoutError):
raise ReadTimeout(e, request=request)
else:
raise
return self.build_response(request, resp)
|
https://github.com/psf/requests/issues/4746
|
import requests
test = requests.get("https://www.fossil.com/us/en/account-dashboard/registered-products.html")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/requests/api.py", line 72, in get
return request('get', url, params=params, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/requests/api.py", line 58, in request
return session.request(method=method, url=url, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/requests/sessions.py", line 512, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.7/dist-packages/requests/sessions.py", line 644, in send
history = [resp for resp in gen] if allow_redirects else []
File "/usr/local/lib/python2.7/dist-packages/requests/sessions.py", line 222, in resolve_redirects
**adapter_kwargs
File "/usr/local/lib/python2.7/dist-packages/requests/sessions.py", line 622, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/requests/adapters.py", line 410, in send
conn = self.get_connection(request.url, proxies)
File "/usr/local/lib/python2.7/dist-packages/requests/adapters.py", line 314, in get_connection
conn = self.poolmanager.connection_from_url(url)
File "/usr/local/lib/python2.7/dist-packages/urllib3/poolmanager.py", line 277, in connection_from_url
u = parse_url(url)
File "/usr/local/lib/python2.7/dist-packages/urllib3/util/url.py", line 199, in parse_url
raise LocationParseError(url)
urllib3.exceptions.LocationParseError: Failed to parse: www.fossil.com:-1
|
urllib3.exceptions.LocationParseError
|
def stream_decode_response_unicode(iterator, r):
"""Stream decodes a iterator."""
if r.encoding is None:
for item in iterator:
yield item
return
decoder = codecs.getincrementaldecoder(r.encoding)(errors="replace")
for chunk in iterator:
rv = decoder.decode(chunk)
if rv:
yield rv
rv = decoder.decode(b"", final=True)
if rv:
yield rv
|
def stream_decode_response_unicode(iterator, r):
"""Stream decodes a iterator."""
encoding = r.encoding
if encoding is None:
encoding = r.apparent_encoding
try:
decoder = codecs.getincrementaldecoder(encoding)(errors="replace")
except (LookupError, TypeError):
# A LookupError is raised if the encoding was not found which could
# indicate a misspelling or similar mistake.
#
# A TypeError can be raised if encoding is None
raise UnicodeError("Unable to decode contents with encoding %s." % encoding)
for chunk in iterator:
rv = decoder.decode(chunk)
if rv:
yield rv
rv = decoder.decode(b"", final=True)
if rv:
yield rv
|
https://github.com/psf/requests/issues/3481
|
$ ./bin/python case.py
Traceback (most recent call last):
File "case.py", line 6, in <module>
for line in response.iter_lines(chunk_size=30, decode_unicode=True):
File "/Users/jone/temp/requests-stream/lib/python2.7/site-packages/requests/models.py", line 720, in iter_lines
for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
File "/Users/jone/temp/requests-stream/lib/python2.7/site-packages/requests/utils.py", line 374, in stream_decode_response_unicode
for chunk in iterator:
File "/Users/jone/temp/requests-stream/lib/python2.7/site-packages/requests/models.py", line 676, in generate
for chunk in self.raw.stream(chunk_size, decode_content=True):
File "/Users/jone/temp/requests-stream/lib/python2.7/site-packages/requests/packages/urllib3/response.py", line 353, in stream
for line in self.read_chunked(amt, decode_content=decode_content):
File "/Users/jone/temp/requests-stream/lib/python2.7/site-packages/requests/packages/urllib3/response.py", line 521, in read_chunked
line = self._fp.fp.readline()
AttributeError: 'NoneType' object has no attribute 'readline'
|
AttributeError
|
def _encode_params(data):
"""Encode parameters in a piece of data.
Will successfully encode parameters when passed as a dict or a list of
2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
if parameters are supplied as a dict.
"""
if isinstance(data, (str, bytes)):
return to_native_string(data)
elif hasattr(data, "read"):
return data
elif hasattr(data, "__iter__"):
result = []
for k, vs in to_key_val_list(data):
if isinstance(vs, basestring) or not hasattr(vs, "__iter__"):
vs = [vs]
for v in vs:
if v is not None:
result.append(
(
k.encode("utf-8") if isinstance(k, str) else k,
v.encode("utf-8") if isinstance(v, str) else v,
)
)
return urlencode(result, doseq=True)
else:
return data
|
def _encode_params(data):
"""Encode parameters in a piece of data.
Will successfully encode parameters when passed as a dict or a list of
2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
if parameters are supplied as a dict.
"""
if isinstance(data, (str, bytes)):
return data
elif hasattr(data, "read"):
return data
elif hasattr(data, "__iter__"):
result = []
for k, vs in to_key_val_list(data):
if isinstance(vs, basestring) or not hasattr(vs, "__iter__"):
vs = [vs]
for v in vs:
if v is not None:
result.append(
(
k.encode("utf-8") if isinstance(k, str) else k,
v.encode("utf-8") if isinstance(v, str) else v,
)
)
return urlencode(result, doseq=True)
else:
return data
|
https://github.com/psf/requests/issues/2844
|
Traceback (most recent call last):
... skip ...
url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
File "C:\Python35\lib\urllib\parse.py", line 383, in urlunparse
_coerce_args(*components))
File "C:\Python35\lib\urllib\parse.py", line 111, in _coerce_args
raise TypeError("Cannot mix str and non-str arguments")
TypeError: Cannot mix str and non-str arguments
|
TypeError
|
def prepare_url(self, url, params):
"""Prepares the given HTTP URL."""
#: Accept objects that have string representations.
#: We're unable to blindy call unicode/str functions
#: as this will include the bytestring indicator (b'')
#: on python 3.x.
#: https://github.com/kennethreitz/requests/pull/2238
if isinstance(url, bytes):
url = url.decode("utf8")
else:
url = unicode(url) if is_py2 else str(url)
# Don't do any URL preparation for non-HTTP schemes like `mailto`,
# `data` etc to work around exceptions from `url_parse`, which
# handles RFC 3986 only.
if ":" in url and not url.lower().startswith("http"):
self.url = url
return
# Support for unicode domain names and paths.
try:
scheme, auth, host, port, path, query, fragment = parse_url(url)
except LocationParseError as e:
raise InvalidURL(*e.args)
if not scheme:
raise MissingSchema(
"Invalid URL {0!r}: No schema supplied. "
"Perhaps you meant http://{0}?".format(to_native_string(url, "utf8"))
)
if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url)
# Only want to apply IDNA to the hostname
try:
host = host.encode("idna").decode("utf-8")
except UnicodeError:
raise InvalidURL("URL has an invalid label.")
# Carefully reconstruct the network location
netloc = auth or ""
if netloc:
netloc += "@"
netloc += host
if port:
netloc += ":" + str(port)
# Bare domains aren't valid URLs.
if not path:
path = "/"
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode("utf-8")
if isinstance(netloc, str):
netloc = netloc.encode("utf-8")
if isinstance(path, str):
path = path.encode("utf-8")
if isinstance(query, str):
query = query.encode("utf-8")
if isinstance(fragment, str):
fragment = fragment.encode("utf-8")
enc_params = self._encode_params(params)
if enc_params:
if query:
query = "%s&%s" % (query, enc_params)
else:
query = enc_params
url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
self.url = url
|
def prepare_url(self, url, params):
"""Prepares the given HTTP URL."""
#: Accept objects that have string representations.
#: We're unable to blindy call unicode/str functions
#: as this will include the bytestring indicator (b'')
#: on python 3.x.
#: https://github.com/kennethreitz/requests/pull/2238
if isinstance(url, bytes):
url = url.decode("utf8")
else:
url = unicode(url) if is_py2 else str(url)
# Don't do any URL preparation for non-HTTP schemes like `mailto`,
# `data` etc to work around exceptions from `url_parse`, which
# handles RFC 3986 only.
if ":" in url and not url.lower().startswith("http"):
self.url = url
return
# Support for unicode domain names and paths.
try:
scheme, auth, host, port, path, query, fragment = parse_url(url)
except LocationParseError as e:
raise InvalidURL(*e.args)
if not scheme:
raise MissingSchema(
"Invalid URL {0!r}: No schema supplied. "
"Perhaps you meant http://{0}?".format(url)
)
if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url)
# Only want to apply IDNA to the hostname
try:
host = host.encode("idna").decode("utf-8")
except UnicodeError:
raise InvalidURL("URL has an invalid label.")
# Carefully reconstruct the network location
netloc = auth or ""
if netloc:
netloc += "@"
netloc += host
if port:
netloc += ":" + str(port)
# Bare domains aren't valid URLs.
if not path:
path = "/"
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode("utf-8")
if isinstance(netloc, str):
netloc = netloc.encode("utf-8")
if isinstance(path, str):
path = path.encode("utf-8")
if isinstance(query, str):
query = query.encode("utf-8")
if isinstance(fragment, str):
fragment = fragment.encode("utf-8")
enc_params = self._encode_params(params)
if enc_params:
if query:
query = "%s&%s" % (query, enc_params)
else:
query = enc_params
url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
self.url = url
|
https://github.com/psf/requests/issues/2540
|
---------------------------------------------------------------------------
UnicodeEncodeError Traceback (most recent call last)
<ipython-input-3-bf1281984b3a> in <module>()
----> 1 requests.get(url)
/Library/Python/2.7/site-packages/requests-2.6.0-py2.7.egg/requests/api.pyc in get(url, **kwargs)
66
67 kwargs.setdefault('allow_redirects', True)
---> 68 return request('get', url, **kwargs)
69
70
/Library/Python/2.7/site-packages/requests-2.6.0-py2.7.egg/requests/api.pyc in request(method, url, **kwargs)
48
49 session = sessions.Session()
---> 50 response = session.request(method=method, url=url, **kwargs)
51 # By explicitly closing the session, we avoid leaving sockets open which
52 # can trigger a ResourceWarning in some cases, and look like a memory leak
/Library/Python/2.7/site-packages/requests-2.6.0-py2.7.egg/requests/sessions.pyc in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)
448 hooks = hooks,
449 )
--> 450 prep = self.prepare_request(req)
451
452 proxies = proxies or {}
/Library/Python/2.7/site-packages/requests-2.6.0-py2.7.egg/requests/sessions.pyc in prepare_request(self, request)
379 auth=merge_setting(auth, self.auth),
380 cookies=merged_cookies,
--> 381 hooks=merge_hooks(request.hooks, self.hooks),
382 )
383 return p
/Library/Python/2.7/site-packages/requests-2.6.0-py2.7.egg/requests/models.pyc in prepare(self, method, url, headers, files, data, params, auth, cookies, hooks, json)
302
303 self.prepare_method(method)
--> 304 self.prepare_url(url, params)
305 self.prepare_headers(headers)
306 self.prepare_cookies(cookies)
/Library/Python/2.7/site-packages/requests-2.6.0-py2.7.egg/requests/models.pyc in prepare_url(self, url, params)
359 if not scheme:
360 raise MissingSchema("Invalid URL {0!r}: No schema supplied. "
--> 361 "Perhaps you meant http://{0}?".format(url))
362
363 if not host:
UnicodeEncodeError: 'ascii' codec can't encode character u'\xe4' in position 1: ordinal not in range(128)
|
UnicodeEncodeError
|
def request(
self,
method,
url,
params=None,
data=None,
headers=None,
cookies=None,
files=None,
auth=None,
timeout=None,
allow_redirects=True,
proxies=None,
hooks=None,
stream=None,
verify=None,
cert=None,
):
"""Constructs a :class:`Request <Request>`, prepares it and sends it.
Returns :class:`Response <Response>` object.
:param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query
string for the :class:`Request`.
:param data: (optional) Dictionary or bytes to send in the body of the
:class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the
:class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the
:class:`Request`.
:param files: (optional) Dictionary of 'filename': file-like-objects
for multipart encoding upload.
:param auth: (optional) Auth tuple or callable to enable
Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) Float describing the timeout of the
request.
:param allow_redirects: (optional) Boolean. Set to True by default.
:param proxies: (optional) Dictionary mapping protocol to the URL of
the proxy.
:param stream: (optional) whether to immediately download the response
content. Defaults to ``False``.
:param verify: (optional) if ``True``, the SSL cert will be verified.
A CA_BUNDLE path can also be provided.
:param cert: (optional) if String, path to ssl client cert file (.pem).
If Tuple, ('cert', 'key') pair.
"""
method = builtin_str(method)
# Create the Request.
req = Request(
method=method.upper(),
url=url,
headers=headers,
files=files,
data=data or {},
params=params or {},
auth=auth,
cookies=cookies,
hooks=hooks,
)
prep = self.prepare_request(req)
# Add param cookies to session cookies
if isinstance(cookies, dict):
self.cookies = cookiejar_from_dict(
cookies, cookiejar=self.cookies, overwrite=False
)
elif isinstance(cookies, cookielib.CookieJar):
self.cookies.update(cookies)
proxies = proxies or {}
# Gather clues from the surrounding environment.
if self.trust_env:
# Set environment's proxies.
env_proxies = get_environ_proxies(url) or {}
for k, v in env_proxies.items():
proxies.setdefault(k, v)
# Look for configuration.
if not verify and verify is not False:
verify = os.environ.get("REQUESTS_CA_BUNDLE")
# Curl compatibility.
if not verify and verify is not False:
verify = os.environ.get("CURL_CA_BUNDLE")
# Merge all the kwargs.
proxies = merge_setting(proxies, self.proxies)
stream = merge_setting(stream, self.stream)
verify = merge_setting(verify, self.verify)
cert = merge_setting(cert, self.cert)
# Send the request.
send_kwargs = {
"stream": stream,
"timeout": timeout,
"verify": verify,
"cert": cert,
"proxies": proxies,
"allow_redirects": allow_redirects,
}
resp = self.send(prep, **send_kwargs)
return resp
|
def request(
self,
method,
url,
params=None,
data=None,
headers=None,
cookies=None,
files=None,
auth=None,
timeout=None,
allow_redirects=True,
proxies=None,
hooks=None,
stream=None,
verify=None,
cert=None,
):
"""Constructs a :class:`Request <Request>`, prepares it and sends it.
Returns :class:`Response <Response>` object.
:param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query
string for the :class:`Request`.
:param data: (optional) Dictionary or bytes to send in the body of the
:class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the
:class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the
:class:`Request`.
:param files: (optional) Dictionary of 'filename': file-like-objects
for multipart encoding upload.
:param auth: (optional) Auth tuple or callable to enable
Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) Float describing the timeout of the
request.
:param allow_redirects: (optional) Boolean. Set to True by default.
:param proxies: (optional) Dictionary mapping protocol to the URL of
the proxy.
:param stream: (optional) whether to immediately download the response
content. Defaults to ``False``.
:param verify: (optional) if ``True``, the SSL cert will be verified.
A CA_BUNDLE path can also be provided.
:param cert: (optional) if String, path to ssl client cert file (.pem).
If Tuple, ('cert', 'key') pair.
"""
method = builtin_str(method)
# Create the Request.
req = Request(
method=method.upper(),
url=url,
headers=headers,
files=files,
data=data or {},
params=params or {},
auth=auth,
cookies=cookies,
hooks=hooks,
)
prep = self.prepare_request(req)
# Add param cookies to session cookies
self.cookies = cookiejar_from_dict(cookies, cookiejar=self.cookies, overwrite=False)
proxies = proxies or {}
# Gather clues from the surrounding environment.
if self.trust_env:
# Set environment's proxies.
env_proxies = get_environ_proxies(url) or {}
for k, v in env_proxies.items():
proxies.setdefault(k, v)
# Look for configuration.
if not verify and verify is not False:
verify = os.environ.get("REQUESTS_CA_BUNDLE")
# Curl compatibility.
if not verify and verify is not False:
verify = os.environ.get("CURL_CA_BUNDLE")
# Merge all the kwargs.
proxies = merge_setting(proxies, self.proxies)
stream = merge_setting(stream, self.stream)
verify = merge_setting(verify, self.verify)
cert = merge_setting(cert, self.cert)
# Send the request.
send_kwargs = {
"stream": stream,
"timeout": timeout,
"verify": verify,
"cert": cert,
"proxies": proxies,
"allow_redirects": allow_redirects,
}
resp = self.send(prep, **send_kwargs)
return resp
|
https://github.com/psf/requests/issues/1711
|
Traceback (most recent call last):
File "rtest.py", line 16, in <module>
requests.get(URL, cookies=cookiejar)
File "/tmp/rtestenv/lib/python2.7/site-packages/requests/api.py", line 55, in get
return request('get', url, **kwargs)
File "/tmp/rtestenv/lib/python2.7/site-packages/requests/api.py", line 44, in request
return session.request(method=method, url=url, **kwargs)
File "/tmp/rtestenv/lib/python2.7/site-packages/requests/sessions.py", line 327, in request
self.cookies = cookiejar_from_dict(cookies, cookiejar=self.cookies, overwrite=False)
File "/tmp/rtestenv/lib/python2.7/site-packages/requests/cookies.py", line 410, in cookiejar_from_dict
cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
AttributeError: MozillaCookieJar instance has no attribute '__getitem__'
|
AttributeError
|
def prepare_request(self, request):
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for
transmission and returns it. The :class:`PreparedRequest` has settings
merged from the :class:`Request <Request>` instance and those of the
:class:`Session`.
:param request: :class:`Request` instance to prepare with this
session's settings.
"""
cookies = request.cookies or {}
# Bootstrap CookieJar.
if not isinstance(cookies, cookielib.CookieJar):
cookies = cookiejar_from_dict(cookies)
# Merge with session cookies
merged_cookies = merge_cookies(
merge_cookies(RequestsCookieJar(), self.cookies), cookies
)
# Set environment's basic authentication if not explicitly set.
auth = request.auth
if self.trust_env and not auth and not self.auth:
auth = get_netrc_auth(request.url)
p = PreparedRequest()
p.prepare(
method=request.method.upper(),
url=request.url,
files=request.files,
data=request.data,
headers=merge_setting(
request.headers, self.headers, dict_class=CaseInsensitiveDict
),
params=merge_setting(request.params, self.params),
auth=merge_setting(auth, self.auth),
cookies=merged_cookies,
hooks=merge_setting(request.hooks, self.hooks),
)
return p
|
def prepare_request(self, request):
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for
transmission and returns it. The :class:`PreparedRequest` has settings
merged from the :class:`Request <Request>` instance and those of the
:class:`Session`.
:param request: :class:`Request` instance to prepare with this
session's settings.
"""
cookies = request.cookies or {}
# Bootstrap CookieJar.
if not isinstance(cookies, cookielib.CookieJar):
cookies = cookiejar_from_dict(cookies)
# Merge with session cookies
merged_cookies = RequestsCookieJar()
merged_cookies.update(self.cookies)
merged_cookies.update(cookies)
# Set environment's basic authentication if not explicitly set.
auth = request.auth
if self.trust_env and not auth and not self.auth:
auth = get_netrc_auth(request.url)
p = PreparedRequest()
p.prepare(
method=request.method.upper(),
url=request.url,
files=request.files,
data=request.data,
headers=merge_setting(
request.headers, self.headers, dict_class=CaseInsensitiveDict
),
params=merge_setting(request.params, self.params),
auth=merge_setting(auth, self.auth),
cookies=merged_cookies,
hooks=merge_setting(request.hooks, self.hooks),
)
return p
|
https://github.com/psf/requests/issues/1711
|
Traceback (most recent call last):
File "rtest.py", line 16, in <module>
requests.get(URL, cookies=cookiejar)
File "/tmp/rtestenv/lib/python2.7/site-packages/requests/api.py", line 55, in get
return request('get', url, **kwargs)
File "/tmp/rtestenv/lib/python2.7/site-packages/requests/api.py", line 44, in request
return session.request(method=method, url=url, **kwargs)
File "/tmp/rtestenv/lib/python2.7/site-packages/requests/sessions.py", line 327, in request
self.cookies = cookiejar_from_dict(cookies, cookiejar=self.cookies, overwrite=False)
File "/tmp/rtestenv/lib/python2.7/site-packages/requests/cookies.py", line 410, in cookiejar_from_dict
cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
AttributeError: MozillaCookieJar instance has no attribute '__getitem__'
|
AttributeError
|
def stream_decode_response_unicode(iterator, r):
"""Stream decodes a iterator."""
if r.encoding is None:
for item in iterator:
yield item
return
decoder = codecs.getincrementaldecoder(r.encoding)(errors="replace")
for chunk in iterator:
rv = decoder.decode(chunk)
if rv:
yield rv
rv = decoder.decode(b"", final=True)
if rv:
yield rv
|
def stream_decode_response_unicode(iterator, r):
"""Stream decodes a iterator."""
if r.encoding is None:
for item in iterator:
yield item
return
decoder = codecs.getincrementaldecoder(r.encoding)(errors="replace")
for chunk in iterator:
rv = decoder.decode(chunk)
if rv:
yield rv
rv = decoder.decode("", final=True)
if rv:
yield rv
|
https://github.com/psf/requests/issues/1434
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-21-86c335a3edfc> in <module>()
----> 1 rv = decoder.decode('', final=True)
/Users/chen/Virtualenvs/python3Env/lib/python3.3/codecs.py in decode(self, input, final)
297 def decode(self, input, final=False):
298 # decode input (taking the buffer into account)
--> 299 data = self.buffer + input
300 (result, consumed) = self._buffer_decode(data, self.errors, final)
301 # keep undecoded input until the next call
TypeError: can't concat bytes to str
|
TypeError
|
def text(self):
"""Content of the response, in unicode.
if Response.encoding is None and chardet module is available, encoding
will be guessed.
"""
# Try charset from content-type
content = None
encoding = self.encoding
# Fallback to auto-detected encoding if chardet is available.
if self.encoding is None:
encoding = self._detected_encoding()
# Decode unicode from given encoding.
try:
content = str(self.content, encoding, errors="replace")
except LookupError:
# A LookupError is raised if the encoding was not found which could
# indicate a misspelling or similar mistake.
#
# So we try blindly encoding.
content = str(self.content, errors="replace")
except (UnicodeError, TypeError):
pass
return content
|
def text(self):
"""Content of the response, in unicode.
if Response.encoding is None and chardet module is available, encoding
will be guessed.
"""
# Try charset from content-type
content = None
encoding = self.encoding
# Fallback to auto-detected encoding if chardet is available.
if self.encoding is None:
encoding = self._detected_encoding()
# Decode unicode from given encoding.
try:
content = str(self.content, encoding, errors="replace")
except (UnicodeError, TypeError):
pass
return content
|
https://github.com/psf/requests/issues/338
|
url = 'http://dilbert.com'
len(urllib2.urlopen(url).read())
30194
len(requests.get(url).content)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "build/bdist.macosx-10.7-intel/egg/requests/models.py", line 713, in content
LookupError: unknown encoding: utf-8lias
|
LookupError
|
def request_binding(self, guest_id, dst_ip, ssh_port, telnet_port):
self.lock.acquire()
try:
# see if binding is already created
if guest_id in self.bindings:
# increase connected
self.bindings[guest_id][0] += 1
return self.bindings[guest_id][1]._realPortNumber, self.bindings[guest_id][
2
]._realPortNumber
else:
nat_ssh = reactor.listenTCP(
0, ServerFactory(dst_ip, ssh_port), interface="0.0.0.0"
)
nat_telnet = reactor.listenTCP(
0, ServerFactory(dst_ip, telnet_port), interface="0.0.0.0"
)
self.bindings[guest_id] = [1, nat_ssh, nat_telnet]
return nat_ssh._realPortNumber, nat_telnet._realPortNumber
finally:
self.lock.release()
|
def request_binding(self, guest_id, dst_ip, ssh_port, telnet_port):
self.lock.acquire()
try:
# see if binding is already created
if dst_ip in self.bindings:
# increase connected
self.bindings[guest_id][0] += 1
return self.bindings[guest_id][1]._realPortNumber, self.bindings[guest_id][
2
]._realPortNumber
else:
nat_ssh = reactor.listenTCP(
0, ServerFactory(dst_ip, ssh_port), interface="0.0.0.0"
)
nat_telnet = reactor.listenTCP(
0, ServerFactory(dst_ip, telnet_port), interface="0.0.0.0"
)
self.bindings[guest_id] = [0, nat_ssh, nat_telnet]
return nat_ssh._realPortNumber, nat_telnet._realPortNumber
finally:
self.lock.release()
|
https://github.com/cowrie/cowrie/issues/1361
|
2020-06-08T14:16:22.505410Z [backend_pool.nat.ServerFactory#info] Starting factory <backend_pool.nat.ServerFactory object at 0x7fc035e2ce80>
2020-06-08T14:16:22.507706Z [backend_pool.nat.ClientFactory#info] Starting factory <backend_pool.nat.ClientFactory object at 0x7fc035e2c3c8>
2020-06-08T14:16:22.508111Z [twisted.internet.tcp.Port#info] EMFILE encountered; releasing reserved file descriptor.
2020-06-08T14:16:22.508193Z [twisted.internet.tcp.Port#info] Re-reserving EMFILE recovery file descriptor.
2020-06-08T14:16:22.508593Z [backend_pool.nat.ClientFactory#info] Stopping factory <backend_pool.nat.ClientFactory object at 0x7fc035e2c3c8>
2020-06-08T14:16:23.235361Z [twisted.internet.tcp.Port#info] EMFILE encountered; releasing reserved file descriptor.
2020-06-08T14:16:23.235508Z [twisted.internet.tcp.Port#info] EMFILE recovery: Closed socket from ('172.16.1.6', 42942)
2020-06-08T14:16:23.235537Z [twisted.internet.tcp.Port#info] Re-reserving EMFILE recovery file descriptor.
2020-06-08T14:16:24.463433Z [-] Unhandled Error
Traceback (most recent call last):
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/application/app.py", line 399, in startReactor
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/application/app.py", line 312, in runReactorWithLogging
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 1283, in run
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 1292, in mainLoop
--- <exception caught here> ---
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 913, in runUntilCurrent
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 280, in producer_loop
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 190, in __producer_check_health
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 151, in has_connectivity
File "/srv/cowrie/cowrie-github/src/backend_pool/util.py", line 18, in nmap_port
File "/usr/lib/python3.6/subprocess.py", line 423, in run
File "/usr/lib/python3.6/subprocess.py", line 687, in __init__
File "/usr/lib/python3.6/subprocess.py", line 1197, in _get_handles
builtins.OSError: [Errno 24] Too many open files
|
builtins.OSError
|
def free_binding(self, guest_id):
self.lock.acquire()
try:
self.bindings[guest_id][0] -= 1
# stop listening if no one is connected
if self.bindings[guest_id][0] <= 0:
self.bindings[guest_id][1].stopListening()
self.bindings[guest_id][2].stopListening()
del self.bindings[guest_id]
finally:
self.lock.release()
|
def free_binding(self, guest_id):
self.lock.acquire()
try:
self.bindings[guest_id][0] -= 1
# stop listening if no-one connected
if self.bindings[guest_id][0] == 0:
self.bindings[guest_id][1].stopListening()
self.bindings[guest_id][2].stopListening()
finally:
self.lock.release()
|
https://github.com/cowrie/cowrie/issues/1361
|
2020-06-08T14:16:22.505410Z [backend_pool.nat.ServerFactory#info] Starting factory <backend_pool.nat.ServerFactory object at 0x7fc035e2ce80>
2020-06-08T14:16:22.507706Z [backend_pool.nat.ClientFactory#info] Starting factory <backend_pool.nat.ClientFactory object at 0x7fc035e2c3c8>
2020-06-08T14:16:22.508111Z [twisted.internet.tcp.Port#info] EMFILE encountered; releasing reserved file descriptor.
2020-06-08T14:16:22.508193Z [twisted.internet.tcp.Port#info] Re-reserving EMFILE recovery file descriptor.
2020-06-08T14:16:22.508593Z [backend_pool.nat.ClientFactory#info] Stopping factory <backend_pool.nat.ClientFactory object at 0x7fc035e2c3c8>
2020-06-08T14:16:23.235361Z [twisted.internet.tcp.Port#info] EMFILE encountered; releasing reserved file descriptor.
2020-06-08T14:16:23.235508Z [twisted.internet.tcp.Port#info] EMFILE recovery: Closed socket from ('172.16.1.6', 42942)
2020-06-08T14:16:23.235537Z [twisted.internet.tcp.Port#info] Re-reserving EMFILE recovery file descriptor.
2020-06-08T14:16:24.463433Z [-] Unhandled Error
Traceback (most recent call last):
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/application/app.py", line 399, in startReactor
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/application/app.py", line 312, in runReactorWithLogging
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 1283, in run
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 1292, in mainLoop
--- <exception caught here> ---
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 913, in runUntilCurrent
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 280, in producer_loop
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 190, in __producer_check_health
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 151, in has_connectivity
File "/srv/cowrie/cowrie-github/src/backend_pool/util.py", line 18, in nmap_port
File "/usr/lib/python3.6/subprocess.py", line 423, in run
File "/usr/lib/python3.6/subprocess.py", line 687, in __init__
File "/usr/lib/python3.6/subprocess.py", line 1197, in _get_handles
builtins.OSError: [Errno 24] Too many open files
|
builtins.OSError
|
def startFactory(self):
# start the pool thread with default configs
self.pool_service = PoolService(self.nat)
self.pool_service.start_pool()
|
def startFactory(self):
# start the pool thread with default configs
self.pool_service = PoolService()
self.pool_service.start_pool()
|
https://github.com/cowrie/cowrie/issues/1361
|
2020-06-08T14:16:22.505410Z [backend_pool.nat.ServerFactory#info] Starting factory <backend_pool.nat.ServerFactory object at 0x7fc035e2ce80>
2020-06-08T14:16:22.507706Z [backend_pool.nat.ClientFactory#info] Starting factory <backend_pool.nat.ClientFactory object at 0x7fc035e2c3c8>
2020-06-08T14:16:22.508111Z [twisted.internet.tcp.Port#info] EMFILE encountered; releasing reserved file descriptor.
2020-06-08T14:16:22.508193Z [twisted.internet.tcp.Port#info] Re-reserving EMFILE recovery file descriptor.
2020-06-08T14:16:22.508593Z [backend_pool.nat.ClientFactory#info] Stopping factory <backend_pool.nat.ClientFactory object at 0x7fc035e2c3c8>
2020-06-08T14:16:23.235361Z [twisted.internet.tcp.Port#info] EMFILE encountered; releasing reserved file descriptor.
2020-06-08T14:16:23.235508Z [twisted.internet.tcp.Port#info] EMFILE recovery: Closed socket from ('172.16.1.6', 42942)
2020-06-08T14:16:23.235537Z [twisted.internet.tcp.Port#info] Re-reserving EMFILE recovery file descriptor.
2020-06-08T14:16:24.463433Z [-] Unhandled Error
Traceback (most recent call last):
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/application/app.py", line 399, in startReactor
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/application/app.py", line 312, in runReactorWithLogging
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 1283, in run
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 1292, in mainLoop
--- <exception caught here> ---
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 913, in runUntilCurrent
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 280, in producer_loop
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 190, in __producer_check_health
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 151, in has_connectivity
File "/srv/cowrie/cowrie-github/src/backend_pool/util.py", line 18, in nmap_port
File "/usr/lib/python3.6/subprocess.py", line 423, in run
File "/usr/lib/python3.6/subprocess.py", line 687, in __init__
File "/usr/lib/python3.6/subprocess.py", line 1197, in _get_handles
builtins.OSError: [Errno 24] Too many open files
|
builtins.OSError
|
def __init__(self, nat_service):
self.qemu = backend_pool.libvirt.backend_service.LibvirtBackendService()
self.nat_service = nat_service
self.guests = []
self.guest_id = 0
self.guest_lock = Lock()
# time in seconds between each loop iteration
self.loop_sleep_time = 5
self.loop_next_call = None
# default configs; custom values will come from the client when they connect to the pool
self.max_vm = 2
self.vm_unused_timeout = 600
self.share_guests = True
# file configs
self.ssh_port = CowrieConfig().getint("backend_pool", "guest_ssh_port", fallback=-1)
self.telnet_port = CowrieConfig().getint(
"backend_pool", "guest_telnet_port", fallback=-1
)
self.local_pool = CowrieConfig().get("proxy", "pool", fallback="local") == "local"
self.pool_only = CowrieConfig().getboolean(
"backend_pool", "pool_only", fallback=False
)
self.use_nat = CowrieConfig().getboolean("backend_pool", "use_nat", fallback=True)
# detect invalid config
if not self.ssh_port > 0 and not self.telnet_port > 0:
log.msg(
eventid="cowrie.backend_pool.service",
format="Invalid configuration: one of SSH or Telnet ports must be defined!",
)
os._exit(1)
self.any_vm_up = False # TODO fix for no VM available
|
def __init__(self):
self.qemu = backend_pool.libvirt.backend_service.LibvirtBackendService()
self.guests = []
self.guest_id = 0
self.guest_lock = Lock()
# time in seconds between each loop iteration
self.loop_sleep_time = 5
self.loop_next_call = None
# default configs; custom values will come from the client when they connect to the pool
self.max_vm = 2
self.vm_unused_timeout = 600
self.share_guests = True
# file configs
self.ssh_port = CowrieConfig().getint("backend_pool", "guest_ssh_port", fallback=-1)
self.telnet_port = CowrieConfig().getint(
"backend_pool", "guest_telnet_port", fallback=-1
)
# detect invalid config
if not self.ssh_port > 0 and not self.telnet_port > 0:
log.msg(
eventid="cowrie.backend_pool.service",
format="Invalid configuration: one of SSH or Telnet ports must be defined!",
)
os._exit(1)
self.any_vm_up = False # TODO fix for no VM available
|
https://github.com/cowrie/cowrie/issues/1361
|
2020-06-08T14:16:22.505410Z [backend_pool.nat.ServerFactory#info] Starting factory <backend_pool.nat.ServerFactory object at 0x7fc035e2ce80>
2020-06-08T14:16:22.507706Z [backend_pool.nat.ClientFactory#info] Starting factory <backend_pool.nat.ClientFactory object at 0x7fc035e2c3c8>
2020-06-08T14:16:22.508111Z [twisted.internet.tcp.Port#info] EMFILE encountered; releasing reserved file descriptor.
2020-06-08T14:16:22.508193Z [twisted.internet.tcp.Port#info] Re-reserving EMFILE recovery file descriptor.
2020-06-08T14:16:22.508593Z [backend_pool.nat.ClientFactory#info] Stopping factory <backend_pool.nat.ClientFactory object at 0x7fc035e2c3c8>
2020-06-08T14:16:23.235361Z [twisted.internet.tcp.Port#info] EMFILE encountered; releasing reserved file descriptor.
2020-06-08T14:16:23.235508Z [twisted.internet.tcp.Port#info] EMFILE recovery: Closed socket from ('172.16.1.6', 42942)
2020-06-08T14:16:23.235537Z [twisted.internet.tcp.Port#info] Re-reserving EMFILE recovery file descriptor.
2020-06-08T14:16:24.463433Z [-] Unhandled Error
Traceback (most recent call last):
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/application/app.py", line 399, in startReactor
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/application/app.py", line 312, in runReactorWithLogging
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 1283, in run
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 1292, in mainLoop
--- <exception caught here> ---
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 913, in runUntilCurrent
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 280, in producer_loop
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 190, in __producer_check_health
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 151, in has_connectivity
File "/srv/cowrie/cowrie-github/src/backend_pool/util.py", line 18, in nmap_port
File "/usr/lib/python3.6/subprocess.py", line 423, in run
File "/usr/lib/python3.6/subprocess.py", line 687, in __init__
File "/usr/lib/python3.6/subprocess.py", line 1197, in _get_handles
builtins.OSError: [Errno 24] Too many open files
|
builtins.OSError
|
def stop_pool(self):
# lazy import to avoid exception if not using the backend_pool and libvirt not installed (#1185)
import libvirt
log.msg(eventid="cowrie.backend_pool.service", format="Trying pool clean stop")
# stop loop
if self.loop_next_call:
self.loop_next_call.cancel()
# try destroying all guests
for guest in self.guests:
self.qemu.destroy_guest(guest["domain"], guest["snapshot"])
# force destroy remaining stuff
self.qemu.destroy_all_cowrie()
# close any NAT sockets
if not self.local_pool and self.use_nat or self.pool_only:
log.msg(eventid="cowrie.backend_pool.service", format="Free all NAT bindings")
self.nat_service.free_all()
try:
self.qemu.stop_backend()
except libvirt.libvirtError:
print("Not connected to Qemu")
|
def stop_pool(self):
# lazy import to avoid exception if not using the backend_pool and libvirt not installed (#1185)
import libvirt
log.msg(eventid="cowrie.backend_pool.service", format="Trying pool clean stop")
# stop loop
if self.loop_next_call:
self.loop_next_call.cancel()
# try destroying all guests
for guest in self.guests:
self.qemu.destroy_guest(guest["domain"], guest["snapshot"])
# force destroy remaining stuff
self.qemu.destroy_all_cowrie()
try:
self.qemu.stop_backend()
except libvirt.libvirtError:
print("Not connected to Qemu")
|
https://github.com/cowrie/cowrie/issues/1361
|
2020-06-08T14:16:22.505410Z [backend_pool.nat.ServerFactory#info] Starting factory <backend_pool.nat.ServerFactory object at 0x7fc035e2ce80>
2020-06-08T14:16:22.507706Z [backend_pool.nat.ClientFactory#info] Starting factory <backend_pool.nat.ClientFactory object at 0x7fc035e2c3c8>
2020-06-08T14:16:22.508111Z [twisted.internet.tcp.Port#info] EMFILE encountered; releasing reserved file descriptor.
2020-06-08T14:16:22.508193Z [twisted.internet.tcp.Port#info] Re-reserving EMFILE recovery file descriptor.
2020-06-08T14:16:22.508593Z [backend_pool.nat.ClientFactory#info] Stopping factory <backend_pool.nat.ClientFactory object at 0x7fc035e2c3c8>
2020-06-08T14:16:23.235361Z [twisted.internet.tcp.Port#info] EMFILE encountered; releasing reserved file descriptor.
2020-06-08T14:16:23.235508Z [twisted.internet.tcp.Port#info] EMFILE recovery: Closed socket from ('172.16.1.6', 42942)
2020-06-08T14:16:23.235537Z [twisted.internet.tcp.Port#info] Re-reserving EMFILE recovery file descriptor.
2020-06-08T14:16:24.463433Z [-] Unhandled Error
Traceback (most recent call last):
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/application/app.py", line 399, in startReactor
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/application/app.py", line 312, in runReactorWithLogging
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 1283, in run
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 1292, in mainLoop
--- <exception caught here> ---
File "/srv/cowrie/cowrie-venv/lib/python3.6/site-packages/twisted/internet/base.py", line 913, in runUntilCurrent
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 280, in producer_loop
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 190, in __producer_check_health
File "/srv/cowrie/cowrie-github/src/backend_pool/pool_service.py", line 151, in has_connectivity
File "/srv/cowrie/cowrie-github/src/backend_pool/util.py", line 18, in nmap_port
File "/usr/lib/python3.6/subprocess.py", line 423, in run
File "/usr/lib/python3.6/subprocess.py", line 687, in __init__
File "/usr/lib/python3.6/subprocess.py", line 1197, in _get_handles
builtins.OSError: [Errno 24] Too many open files
|
builtins.OSError
|
def __init__(self, protocol, *args):
self.protocol = protocol
self.args = list(args)
self.environ = self.protocol.cmdstack[0].environ
self.fs = self.protocol.fs
self.data = None # output data
self.input_data = None # used to store STDIN data passed via PIPE
self.writefn = self.protocol.pp.outReceived
self.errorWritefn = self.protocol.pp.errReceived
# MS-DOS style redirect handling, inside the command
# TODO: handle >>, 2>, etc
if ">" in self.args or ">>" in self.args:
if self.args[-1] in [">", ">>"]:
self.errorWrite("-bash: parse error near '\\n' \n")
return
self.writtenBytes = 0
self.writefn = self.write_to_file
if ">>" in self.args:
index = self.args.index(">>")
b_append = True
else:
index = self.args.index(">")
b_append = False
self.outfile = self.fs.resolve_path(
str(self.args[(index + 1)]), self.protocol.cwd
)
del self.args[index:]
p = self.fs.getfile(self.outfile)
if (
not p
or not p[fs.A_REALFILE]
or p[fs.A_REALFILE].startswith("honeyfs")
or not b_append
):
tmp_fname = "%s-%s-%s-redir_%s" % (
time.strftime("%Y%m%d-%H%M%S"),
self.protocol.getProtoTransport().transportId,
self.protocol.terminal.transport.session.id,
re.sub("[^A-Za-z0-9]", "_", self.outfile),
)
self.safeoutfile = os.path.join(
CowrieConfig().get("honeypot", "download_path"), tmp_fname
)
perm = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
try:
self.fs.mkfile(self.outfile, 0, 0, 0, stat.S_IFREG | perm)
except fs.FileNotFound:
# The outfile locates at a non-existing directory.
self.errorWrite("-bash: %s: No such file or directory\n" % self.outfile)
self.writefn = self.write_to_failed
self.outfile = None
self.safeoutfile = None
except fs.PermissionDenied:
# The outfile locates in a file-system that doesn't allow file creation
self.errorWrite("-bash: %s: Permission denied\n" % self.outfile)
self.writefn = self.write_to_failed
self.outfile = None
self.safeoutfile = None
else:
with open(self.safeoutfile, "ab"):
self.fs.update_realfile(
self.fs.getfile(self.outfile), self.safeoutfile
)
else:
self.safeoutfile = p[fs.A_REALFILE]
|
def __init__(self, protocol, *args):
self.protocol = protocol
self.args = list(args)
self.environ = self.protocol.cmdstack[0].environ
self.fs = self.protocol.fs
self.data = None # output data
self.input_data = None # used to store STDIN data passed via PIPE
self.writefn = self.protocol.pp.outReceived
self.errorWritefn = self.protocol.pp.errReceived
# MS-DOS style redirect handling, inside the command
# TODO: handle >>, 2>, etc
if ">" in self.args or ">>" in self.args:
self.writtenBytes = 0
self.writefn = self.write_to_file
if ">>" in self.args:
index = self.args.index(">>")
b_append = True
else:
index = self.args.index(">")
b_append = False
self.outfile = self.fs.resolve_path(
str(self.args[(index + 1)]), self.protocol.cwd
)
del self.args[index:]
p = self.fs.getfile(self.outfile)
if (
not p
or not p[fs.A_REALFILE]
or p[fs.A_REALFILE].startswith("honeyfs")
or not b_append
):
tmp_fname = "%s-%s-%s-redir_%s" % (
time.strftime("%Y%m%d-%H%M%S"),
self.protocol.getProtoTransport().transportId,
self.protocol.terminal.transport.session.id,
re.sub("[^A-Za-z0-9]", "_", self.outfile),
)
self.safeoutfile = os.path.join(
CowrieConfig().get("honeypot", "download_path"), tmp_fname
)
perm = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
try:
self.fs.mkfile(self.outfile, 0, 0, 0, stat.S_IFREG | perm)
except fs.FileNotFound:
# The outfile locates at a non-existing directory.
self.errorWrite("-bash: %s: No such file or directory\n" % self.outfile)
self.writefn = self.write_to_failed
self.outfile = None
self.safeoutfile = None
except fs.PermissionDenied:
# The outfile locates in a file-system that doesn't allow file creation
self.errorWrite("-bash: %s: Permission denied\n" % self.outfile)
self.writefn = self.write_to_failed
self.outfile = None
self.safeoutfile = None
else:
with open(self.safeoutfile, "ab"):
self.fs.update_realfile(
self.fs.getfile(self.outfile), self.safeoutfile
)
else:
self.safeoutfile = p[fs.A_REALFILE]
|
https://github.com/cowrie/cowrie/issues/854
|
2018-08-13T09:46:46.286838+0300 [CowrieTelnetTransport,68,[redacted]] login attempt [root/] succeeded
2018-08-13T09:46:46.289616+0300 [CowrieTelnetTransport,68,[redacted]] Initialized emulated server as architecture: linux-x64-lsb
2018-08-13T09:46:46.981872+0300 [CowrieTelnetTransport,68,[redacted]] Warning: state changed and new state returned
2018-08-13T09:46:59.394970+0300 [CowrieTelnetTransport,68,[redacted]] CMD: >/dev/netslink/.t && cd /dev/netslink/ && for a in `ls -a /dev/netslink/`; do >$a; done; >retrieve
2018-08-13T09:46:59.397926+0300 [CowrieTelnetTransport,68,[redacted]] Command found: > /dev/netslink/.t
2018-08-13T09:46:59.398837+0300 [CowrieTelnetTransport,68,[redacted]] Command found: cd /dev/netslink/
2018-08-13T09:46:59.400542+0300 [CowrieTelnetTransport,68,[redacted]] Can't find command None
2018-08-13T09:46:59.400790+0300 [CowrieTelnetTransport,68,[redacted]] Command not found: for a in ` ls -a /dev/netslink/ `
2018-08-13T09:46:59.404197+0300 [CowrieTelnetTransport,68,[redacted]] Command found: do >
2018-08-13T09:46:59.404647+0300 [CowrieTelnetTransport,68,[redacted]] Unhandled Error
Traceback (most recent call last):
File "/home/cowrie/cowrie/cowrie-env/lib/python2.7/site-packages/twisted/python/log.py", line 103, in callWithLogger
return callWithContext({"system": lp}, func, *args, **kw)
File "/home/cowrie/cowrie/cowrie-env/lib/python2.7/site-packages/twisted/python/log.py", line 86, in callWithContext
return context.call({ILogContext: newCtx}, func, *args, **kw)
File "/home/cowrie/cowrie/cowrie-env/lib/python2.7/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/home/cowrie/cowrie/cowrie-env/lib/python2.7/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
--- <exception caught here> ---
File "/home/cowrie/cowrie/cowrie-env/lib/python2.7/site-packages/twisted/internet/posixbase.py", line 614, in _doReadOrWrite
why = selectable.doRead()
File "/home/cowrie/cowrie/cowrie-env/lib/python2.7/site-packages/twisted/internet/tcp.py", line 243, in doRead
return self._dataReceived(data)
File "/home/cowrie/cowrie/cowrie-env/lib/python2.7/site-packages/twisted/internet/tcp.py", line 249, in _dataReceived
rval = self.protocol.dataReceived(data)
File "/home/cowrie/cowrie/cowrie-env/lib/python2.7/site-packages/twisted/conch/telnet.py", line 636, in dataReceived
self.applicationDataReceived(b''.join(appDataBuffer))
File "/home/cowrie/cowrie/cowrie-env/lib/python2.7/site-packages/twisted/conch/telnet.py", line 988, in applicationDataReceived
self.protocol.dataReceived(data)
File "/home/cowrie/cowrie/cowrie-env/lib/python2.7/site-packages/twisted/conch/telnet.py", line 1035, in dataReceived
self.protocol.dataReceived(data)
File "/home/cowrie/cowrie/src/cowrie/insults/insults.py", line 107, in dataReceived
insults.ServerProtocol.dataReceived(self, data)
File "/home/cowrie/cowrie/cowrie-env/lib/python2.7/site-packages/twisted/conch/insults/insults.py", line 537, in dataReceived
self.terminalProtocol.keystrokeReceived(ch, None)
File "/home/cowrie/cowrie/cowrie-env/lib/python2.7/site-packages/twisted/conch/recvline.py", line 225, in keystrokeReceived
m()
File "/home/cowrie/cowrie/src/cowrie/shell/protocol.py", line 324, in handle_RETURN
return recvline.RecvLine.handle_RETURN(self)
File "/home/cowrie/cowrie/cowrie-env/lib/python2.7/site-packages/twisted/conch/recvline.py", line 292, in handle_RETURN
self.lineReceived(line)
File "/home/cowrie/cowrie/src/cowrie/shell/protocol.py", line 183, in lineReceived
self.cmdstack[-1].lineReceived(line)
File "/home/cowrie/cowrie/src/cowrie/shell/honeypot.py", line 104, in lineReceived
self.runCommand()
File "/home/cowrie/cowrie/src/cowrie/shell/honeypot.py", line 213, in runCommand
self.protocol.call_command(pp, cmdclass, *cmd_array[0]['rargs'])
File "/home/cowrie/cowrie/src/cowrie/shell/protocol.py", line 305, in call_command
HoneyPotBaseProtocol.call_command(self, pp, cmd, *args)
File "/home/cowrie/cowrie/src/cowrie/shell/protocol.py", line 192, in call_command
obj.start()
File "/home/cowrie/cowrie/src/cowrie/shell/command.py", line 122, in start
self.exit()
File "/home/cowrie/cowrie/src/cowrie/shell/command.py", line 140, in exit
self.protocol.cmdstack[-1].resume()
File "/home/cowrie/cowrie/src/cowrie/shell/honeypot.py", line 218, in resume
self.runCommand()
File "/home/cowrie/cowrie/src/cowrie/shell/honeypot.py", line 213, in runCommand
self.protocol.call_command(pp, cmdclass, *cmd_array[0]['rargs'])
File "/home/cowrie/cowrie/src/cowrie/shell/protocol.py", line 305, in call_command
HoneyPotBaseProtocol.call_command(self, pp, cmd, *args)
File "/home/cowrie/cowrie/src/cowrie/shell/protocol.py", line 192, in call_command
obj.start()
File "/home/cowrie/cowrie/src/cowrie/shell/command.py", line 122, in start
self.exit()
File "/home/cowrie/cowrie/src/cowrie/shell/command.py", line 140, in exit
self.protocol.cmdstack[-1].resume()
File "/home/cowrie/cowrie/src/cowrie/shell/honeypot.py", line 218, in resume
self.runCommand()
File "/home/cowrie/cowrie/src/cowrie/shell/honeypot.py", line 211, in runCommand
runOrPrompt()
File "/home/cowrie/cowrie/src/cowrie/shell/honeypot.py", line 113, in runOrPrompt
self.runCommand()
File "/home/cowrie/cowrie/src/cowrie/shell/honeypot.py", line 213, in runCommand
self.protocol.call_command(pp, cmdclass, *cmd_array[0]['rargs'])
File "/home/cowrie/cowrie/src/cowrie/shell/protocol.py", line 305, in call_command
HoneyPotBaseProtocol.call_command(self, pp, cmd, *args)
File "/home/cowrie/cowrie/src/cowrie/shell/protocol.py", line 189, in call_command
obj = cmd(self, *args)
File "/home/cowrie/cowrie/src/cowrie/shell/command.py", line 53, in __init__
self.outfile = self.fs.resolve_path(str(self.args[(index + 1)]), self.protocol.cwd)
exceptions.IndexError: list index out of range
2018-08-13T09:46:59.408466+0300 [CowrieTelnetTransport,[redacted]] Duplicate TTY log with hash 0c58d3d66a63aeca0c3e68d928a26ff686d1e08f0d1c70fca45d0ed955cf833f
2018-08-13T09:46:59.408836+0300 [CowrieTelnetTransport,68,[redacted]] Closing TTY Log: log/tty/0c58d3d66a63aeca0c3e68d928a26ff686d1e08f0d1c70fca45d0ed955cf833f after 12 seconds
2018-08-13T09:46:59.411186+0300 [CowrieTelnetTransport,68,[redacted]] Connection lost after 47 seconds
|
exceptions.IndexError
|
def write(self, e):
peerIP = e["src_ip"]
ts = e["timestamp"]
system = e.get("system", None)
if system not in [
"cowrie.ssh.factory.CowrieSSHFactory",
"cowrie.telnet.transport.HoneyPotTelnetFactory",
]:
return
today = str(datetime.now().date())
if not self.context.get(today):
self.context = {}
self.context[today] = set()
key = ",".join([peerIP, system])
if key in self.context[today]:
return
self.context[today].add(key)
tags = "scanner,ssh"
port = 22
if e["system"] == "cowrie.telnet.transport.HoneyPotTelnetFactory":
tags = "scanner,telnet"
port = 23
i = {
"user": self.user,
"feed": self.feed,
"indicator": peerIP,
"portlist": port,
"protocol": "tcp",
"tags": tags,
"firsttime": ts,
"lasttime": ts,
"description": self.description,
}
ret = Indicator(self.client, i).submit()
log.msg("logged to csirtg %s " % ret["location"])
|
def write(self, e):
peerIP = e["src_ip"]
ts = e["timestamp"]
system = e["system"]
if system not in [
"cowrie.ssh.factory.CowrieSSHFactory",
"cowrie.telnet.transport.HoneyPotTelnetFactory",
]:
return
today = str(datetime.now().date())
if not self.context.get(today):
self.context = {}
self.context[today] = set()
key = ",".join([peerIP, system])
if key in self.context[today]:
return
self.context[today].add(key)
tags = "scanner,ssh"
port = 22
if e["system"] == "cowrie.telnet.transport.HoneyPotTelnetFactory":
tags = "scanner,telnet"
port = 23
i = {
"user": self.user,
"feed": self.feed,
"indicator": peerIP,
"portlist": port,
"protocol": "tcp",
"tags": tags,
"firsttime": ts,
"lasttime": ts,
"description": self.description,
}
ret = Indicator(self.client, i).submit()
log.msg("logged to csirtg %s " % ret["location"])
|
https://github.com/cowrie/cowrie/issues/676
|
2018-02-11T16:53:14-0500 [twisted.internet.defer#critical] Unhandled error in Deferred:
2018-02-11T16:53:14-0500 [twisted.internet.defer#critical]
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/twisted/internet/tcp.py", line 289, in connectionLost
protocol.connectionLost(reason)
File "/usr/local/lib/python2.7/dist-packages/twisted/web/client.py", line 223, in connectionLost
self.factory._disconnectedDeferred.callback(None)
File "/usr/local/lib/python2.7/dist-packages/twisted/internet/defer.py", line 459, in callback
self._startRunCallbacks(result)
File "/usr/local/lib/python2.7/dist-packages/twisted/internet/defer.py", line 567, in _startRunCallbacks
self._runCallbacks()
--- <exception caught here> ---
File "/usr/local/lib/python2.7/dist-packages/twisted/internet/defer.py", line 653, in _runCallbacks
current.result = callback(current.result, *args, **kw)
File "/home/cowrie/cowrie/cowrie/commands/wget.py", line 241, in error
url=self.url)
File "/home/cowrie/cowrie/cowrie/shell/protocol.py", line 80, in logDispatch
pt.factory.logDispatch(*msg, **args)
File "/home/cowrie/cowrie/cowrie/telnet/transport.py", line 43, in logDispatch
output.logDispatch(*msg, **args)
File "/home/cowrie/cowrie/cowrie/core/output.py", line 117, in logDispatch
self.emit(ev)
File "/home/cowrie/cowrie/cowrie/core/output.py", line 206, in emit
self.write(ev)
File "/home/cowrie/cowrie/cowrie/output/csirtg.py", line 43, in write
system = e['system']
exceptions.KeyError: 'system'
|
exceptions.KeyError
|
def write(self, logentry):
for i in list(logentry.keys()):
# Remove twisted 15 legacy keys
if i.startswith("log_"):
del logentry[i]
message = json.dumps(logentry) + "\n"
try:
self.sock.sendall(message.encode())
except socket.error as ex:
if ex.errno == 32: # Broken pipe
self.start()
self.sock.sendall(message.encode())
else:
raise
|
def write(self, logentry):
for i in list(logentry.keys()):
# Remove twisted 15 legacy keys
if i.startswith("log_"):
del logentry[i]
message = json.dumps(logentry) + "\n"
try:
self.sock.sendall(message)
except socket.error as ex:
if ex.errno == 32: # Broken pipe
self.start()
self.sock.sendall(message)
else:
raise
|
https://github.com/cowrie/cowrie/issues/1036
|
2019-03-09T18:58:39.452595Z [twisted.logger._observer#critical] Temporarily disabling observer LegacyLogObserverWrapper(<bound method Output.emit of <cowrie.output.socketlog.Output
object at 0x7f1ac2879c18>>) due to exception: [Failure instance: Traceback: <class 'TypeError'>: a bytes-like object is required, not 'str'
/home/cowrie/cowrie/src/cowrie/ssh/transport.py:62:connectionMade
/home/cowrie/cowrie/cowrie-env/lib/python3.6/site-packages/twisted/python/threadable.py:53:sync
/home/cowrie/cowrie/cowrie-env/lib/python3.6/site-packages/twisted/python/log.py:286:msg
/home/cowrie/cowrie/cowrie-env/lib/python3.6/site-packages/twisted/logger/_legacy.py:154:publishToNewObserver
--- <exception caught here> ---
/home/cowrie/cowrie/cowrie-env/lib/python3.6/site-packages/twisted/logger/_observer.py:131:__call__
/home/cowrie/cowrie/cowrie-env/lib/python3.6/site-packages/twisted/logger/_legacy.py:93:__call__
/home/cowrie/cowrie/src/cowrie/core/output.py:214:emit
/home/cowrie/cowrie/src/cowrie/output/socketlog.py:38:write
]
Traceback (most recent call last):
File "/home/cowrie/cowrie/src/cowrie/ssh/transport.py", line 62, in connectionMade
protocol='ssh'
File "/home/cowrie/cowrie/cowrie-env/lib/python3.6/site-packages/twisted/python/threadable.py", line 53, in sync
return function(self, *args, **kwargs)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.6/site-packages/twisted/python/log.py", line 286, in msg
_publishNew(self._publishPublisher, actualEventDict, textFromEventDict)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.6/site-packages/twisted/logger/_legacy.py", line 154, in publishToNewObserver
observer(eventDict)
--- <exception caught here> ---
File "/home/cowrie/cowrie/cowrie-env/lib/python3.6/site-packages/twisted/logger/_observer.py", line 131, in __call__
observer(event)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.6/site-packages/twisted/logger/_legacy.py", line 93, in __call__
self.legacyObserver(event)
File "/home/cowrie/cowrie/src/cowrie/core/output.py", line 214, in emit
self.write(ev)
File "/home/cowrie/cowrie/src/cowrie/output/socketlog.py", line 38, in write
self.sock.sendall(message)
builtins.TypeError: a bytes-like object is required, not 'str'
|
builtins.TypeError
|
def write(self, logentry):
if "isError" not in logentry:
logentry["isError"] = False
if self.format == "cef":
self.syslog.emit(
{
"message": cowrie.core.cef.formatCef(logentry),
"isError": False,
"system": "cowrie",
}
)
else:
# message appears with additional spaces if message key is defined
logentry["message"] = [logentry["message"]]
self.syslog.emit(logentry)
|
def write(self, logentry):
if self.format == "cef":
self.syslog.emit(
{
"message": cowrie.core.cef.formatCef(logentry),
"isError": False,
"system": "cowrie",
}
)
else:
# message appears with additional spaces if message key is defined
logentry["message"] = [logentry["message"]]
self.syslog.emit(logentry)
|
https://github.com/cowrie/cowrie/issues/917
|
2018-10-11T18:29:01.778300+0000 [twisted.logger._observer#critical] Temporarily disabling observer LegacyLogObserverWrapper(<bound method Output.emit of <cowrie.output.localsyslog.Output object at 0xb55ae7b0>>) due to exception: [Failure instance: Traceback: <type 'exceptions.KeyError'>: 'isError'
/opt/cowrie/src/cowrie/core/checkers.py:110:checkUserPass
/opt/cowrie/cowrie-env/local/lib/python2.7/site-packages/twisted/python/threadable.py:53:sync
/opt/cowrie/cowrie-env/local/lib/python2.7/site-packages/twisted/python/log.py:286:msg
/opt/cowrie/cowrie-env/local/lib/python2.7/site-packages/twisted/logger/_legacy.py:154:publishToNewObserver
--- <exception caught here> ---
/opt/cowrie/cowrie-env/local/lib/python2.7/site-packages/twisted/logger/_observer.py:131:__call__
/opt/cowrie/cowrie-env/local/lib/python2.7/site-packages/twisted/logger/_legacy.py:93:__call__
/opt/cowrie/src/cowrie/core/output.py:209:emit
/opt/cowrie/src/cowrie/output/localsyslog.py:65:write
/opt/cowrie/cowrie-env/local/lib/python2.7/site-packages/twisted/python/syslog.py:76:emit
]
Traceback (most recent call last):
File "/opt/cowrie/src/cowrie/core/checkers.py", line 110, in checkUserPass
password=thepassword)
File "/opt/cowrie/cowrie-env/local/lib/python2.7/site-packages/twisted/python/threadable.py", line 53, in sync
return function(self, *args, **kwargs)
File "/opt/cowrie/cowrie-env/local/lib/python2.7/site-packages/twisted/python/log.py", line 286, in msg
_publishNew(self._publishPublisher, actualEventDict, textFromEventDict)
File "/opt/cowrie/cowrie-env/local/lib/python2.7/site-packages/twisted/logger/_legacy.py", line 154, in publishToNewObserver
observer(eventDict)
--- <exception caught here> ---
File "/opt/cowrie/cowrie-env/local/lib/python2.7/site-packages/twisted/logger/_observer.py", line 131, in __call__
observer(event)
File "/opt/cowrie/cowrie-env/local/lib/python2.7/site-packages/twisted/logger/_legacy.py", line 93, in __call__
self.legacyObserver(event)
File "/opt/cowrie/src/cowrie/core/output.py", line 209, in emit
self.write(ev)
File "/opt/cowrie/src/cowrie/output/localsyslog.py", line 65, in write
self.syslog.emit(logentry)
File "/opt/cowrie/cowrie-env/local/lib/python2.7/site-packages/twisted/python/syslog.py", line 76, in emit
if eventDict['isError']:
exceptions.KeyError: 'isError'
|
exceptions.KeyError
|
def call(self):
""" """
escape_fn = lambda s: s
newline = True
try:
optlist, args = getopt.getopt(self.args, "eEn")
for opt in optlist:
if opt[0] == "-e":
escape_fn = functools.partial(str.decode, encoding="string_escape")
elif opt[0] == "-E":
escape_fn = lambda s: s
elif opt[0] == "-n":
newline = False
except:
args = self.args
# FIXME: Wrap in exception, Python escape cannot handle single digit \x codes (e.g. \x1)
try:
self.write(
escape_fn(
re.sub(
'(?<=\\\\)x([0-9a-fA-F]{1})(?=\\\\|"|\s|$)',
"x0\g<1>",
" ".join(args),
)
)
)
except ValueError as e:
log.msg("echo command received Python incorrect hex escape")
if newline is True:
self.write("\n")
|
def call(self):
""" """
escape_fn = lambda s: s
newline = True
try:
optlist, args = getopt.getopt(self.args, "eEn")
for opt in optlist:
if opt[0] == "-e":
escape_fn = functools.partial(str.decode, encoding="string_escape")
elif opt[0] == "-E":
escape_fn = lambda s: s
elif opt[0] == "-n":
newline = False
except:
args = self.args
# FIXME: Wrap in exception, Python escape cannot handle single digit \x codes (e.g. \x1)
try:
self.write(escape_fn(" ".join(args)))
except ValueError as e:
log.msg("echo command received Python incorrect hex escape")
if newline is True:
self.write("\n")
|
https://github.com/cowrie/cowrie/issues/287
|
Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/twisted/python/log.py", line 101, in callWithLogger
return callWithContext({"system": lp}, func, *args, **kw)
File "/usr/local/lib/python2.7/site-packages/twisted/python/log.py", line 84, in callWithContext
return context.call({ILogContext: newCtx}, func, *args, **kw)
File "/usr/local/lib/python2.7/site-packages/twisted/python/context.py", line 118, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/usr/local/lib/python2.7/site-packages/twisted/python/context.py", line 81, in callWithContext
return func(*args,**kw)
--- <exception caught here> ---
File "/usr/local/lib/python2.7/site-packages/twisted/internet/posixbase.py", line 597, in _doReadOrWrite
why = selectable.doRead()
File "/usr/local/lib/python2.7/site-packages/twisted/internet/tcp.py", line 209, in doRead
return self._dataReceived(data)
File "/usr/local/lib/python2.7/site-packages/twisted/internet/tcp.py", line 215, in _dataReceived
rval = self.protocol.dataReceived(data)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/telnet.py", line 589, in dataReceived
self.applicationDataReceived(''.join(appDataBuffer))
File "/usr/local/lib/python2.7/site-packages/twisted/conch/telnet.py", line 898, in applicationDataReceived
self.protocol.dataReceived(bytes)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/telnet.py", line 940, in dataReceived
self.protocol.dataReceived(data)
File "/home/cowrie/cowrie/cowrie/insults/insults.py", line 110, in dataReceived
insults.ServerProtocol.dataReceived(self, data)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/insults/insults.py", line 435, in dataReceived
self.terminalProtocol.keystrokeReceived(ch, None)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/recvline.py", line 201, in keystrokeReceived
m()
File "/home/cowrie/cowrie/cowrie/core/protocol.py", line 353, in handle_RETURN
return recvline.RecvLine.handle_RETURN(self)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/recvline.py", line 259, in handle_RETURN
self.lineReceived(line)
File "/home/cowrie/cowrie/cowrie/core/protocol.py", line 182, in lineReceived
self.cmdstack[-1].lineReceived(line)
File "/home/cowrie/cowrie/cowrie/core/honeypot.py", line 213, in lineReceived
self.runCommand()
File "/home/cowrie/cowrie/cowrie/core/honeypot.py", line 320, in runCommand
self.protocol.call_command(pp, cmdclass, *cmd_array[0]['rargs'])
File "/home/cowrie/cowrie/cowrie/core/protocol.py", line 330, in call_command
why = selectable.doRead()
File "/usr/local/lib/python2.7/site-packages/twisted/internet/tcp.py", line 209, in doRead
return self._dataReceived(data)
File "/usr/local/lib/python2.7/site-packages/twisted/internet/tcp.py", line 215, in _dataReceived
rval = self.protocol.dataReceived(data)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/telnet.py", line 589, in dataReceived
self.applicationDataReceived(''.join(appDataBuffer))
File "/usr/local/lib/python2.7/site-packages/twisted/conch/telnet.py", line 898, in applicationDataReceived
self.protocol.dataReceived(bytes)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/telnet.py", line 940, in dataReceived
self.protocol.dataReceived(data)
File "/home/cowrie/cowrie/cowrie/insults/insults.py", line 110, in dataReceived
insults.ServerProtocol.dataReceived(self, data)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/insults/insults.py", line 435, in dataReceived
self.terminalProtocol.keystrokeReceived(ch, None)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/recvline.py", line 201, in keystrokeReceived
m()
File "/home/cowrie/cowrie/cowrie/core/protocol.py", line 353, in handle_RETURN
return recvline.RecvLine.handle_RETURN(self)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/recvline.py", line 259, in handle_RETURN
self.lineReceived(line)
File "/home/cowrie/cowrie/cowrie/core/protocol.py", line 182, in lineReceived
self.cmdstack[-1].lineReceived(line)
File "/home/cowrie/cowrie/cowrie/core/honeypot.py", line 213, in lineReceived
self.runCommand()
File "/home/cowrie/cowrie/cowrie/core/honeypot.py", line 320, in runCommand
self.protocol.call_command(pp, cmdclass, *cmd_array[0]['rargs'])
File "/home/cowrie/cowrie/cowrie/core/protocol.py", line 330, in call_command
HoneyPotBaseProtocol.call_command(self, pp, cmd, *args)
File "/home/cowrie/cowrie/cowrie/core/protocol.py", line 192, in call_command
obj.start()
File "/home/cowrie/cowrie/cowrie/core/honeypot.py", line 84, in start
self.call()
File "/home/cowrie/cowrie/cowrie/commands/base.py", line 139, in call
self.write(escape_fn(' '.join(args)))
exceptions.ValueError: invalid \x escape
|
exceptions.ValueError
|
def call(self):
""" """
escape_fn = lambda s: s
newline = True
try:
optlist, args = getopt.getopt(self.args, "eEn")
for opt in optlist:
if opt[0] == "-e":
escape_fn = functools.partial(str.decode, encoding="string_escape")
elif opt[0] == "-E":
escape_fn = lambda s: s
elif opt[0] == "-n":
newline = False
except:
args = self.args
# FIXME: Wrap in exception, Python escape cannot handle single digit \x codes (e.g. \x1)
try:
self.write(escape_fn(" ".join(args)))
except exceptions.ValueError as e:
log.msg("echo command received Python incorrect hex escape")
if newline is True:
self.write("\n")
|
def call(self):
""" """
escape_fn = lambda s: s
newline = True
try:
optlist, args = getopt.getopt(self.args, "eEn")
for opt in optlist:
if opt[0] == "-e":
escape_fn = functools.partial(str.decode, encoding="string_escape")
elif opt[0] == "-E":
escape_fn = lambda s: s
elif opt[0] == "-n":
newline = False
except:
args = self.args
self.write(escape_fn(" ".join(args)))
if newline is True:
self.write("\n")
|
https://github.com/cowrie/cowrie/issues/287
|
Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/twisted/python/log.py", line 101, in callWithLogger
return callWithContext({"system": lp}, func, *args, **kw)
File "/usr/local/lib/python2.7/site-packages/twisted/python/log.py", line 84, in callWithContext
return context.call({ILogContext: newCtx}, func, *args, **kw)
File "/usr/local/lib/python2.7/site-packages/twisted/python/context.py", line 118, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/usr/local/lib/python2.7/site-packages/twisted/python/context.py", line 81, in callWithContext
return func(*args,**kw)
--- <exception caught here> ---
File "/usr/local/lib/python2.7/site-packages/twisted/internet/posixbase.py", line 597, in _doReadOrWrite
why = selectable.doRead()
File "/usr/local/lib/python2.7/site-packages/twisted/internet/tcp.py", line 209, in doRead
return self._dataReceived(data)
File "/usr/local/lib/python2.7/site-packages/twisted/internet/tcp.py", line 215, in _dataReceived
rval = self.protocol.dataReceived(data)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/telnet.py", line 589, in dataReceived
self.applicationDataReceived(''.join(appDataBuffer))
File "/usr/local/lib/python2.7/site-packages/twisted/conch/telnet.py", line 898, in applicationDataReceived
self.protocol.dataReceived(bytes)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/telnet.py", line 940, in dataReceived
self.protocol.dataReceived(data)
File "/home/cowrie/cowrie/cowrie/insults/insults.py", line 110, in dataReceived
insults.ServerProtocol.dataReceived(self, data)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/insults/insults.py", line 435, in dataReceived
self.terminalProtocol.keystrokeReceived(ch, None)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/recvline.py", line 201, in keystrokeReceived
m()
File "/home/cowrie/cowrie/cowrie/core/protocol.py", line 353, in handle_RETURN
return recvline.RecvLine.handle_RETURN(self)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/recvline.py", line 259, in handle_RETURN
self.lineReceived(line)
File "/home/cowrie/cowrie/cowrie/core/protocol.py", line 182, in lineReceived
self.cmdstack[-1].lineReceived(line)
File "/home/cowrie/cowrie/cowrie/core/honeypot.py", line 213, in lineReceived
self.runCommand()
File "/home/cowrie/cowrie/cowrie/core/honeypot.py", line 320, in runCommand
self.protocol.call_command(pp, cmdclass, *cmd_array[0]['rargs'])
File "/home/cowrie/cowrie/cowrie/core/protocol.py", line 330, in call_command
why = selectable.doRead()
File "/usr/local/lib/python2.7/site-packages/twisted/internet/tcp.py", line 209, in doRead
return self._dataReceived(data)
File "/usr/local/lib/python2.7/site-packages/twisted/internet/tcp.py", line 215, in _dataReceived
rval = self.protocol.dataReceived(data)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/telnet.py", line 589, in dataReceived
self.applicationDataReceived(''.join(appDataBuffer))
File "/usr/local/lib/python2.7/site-packages/twisted/conch/telnet.py", line 898, in applicationDataReceived
self.protocol.dataReceived(bytes)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/telnet.py", line 940, in dataReceived
self.protocol.dataReceived(data)
File "/home/cowrie/cowrie/cowrie/insults/insults.py", line 110, in dataReceived
insults.ServerProtocol.dataReceived(self, data)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/insults/insults.py", line 435, in dataReceived
self.terminalProtocol.keystrokeReceived(ch, None)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/recvline.py", line 201, in keystrokeReceived
m()
File "/home/cowrie/cowrie/cowrie/core/protocol.py", line 353, in handle_RETURN
return recvline.RecvLine.handle_RETURN(self)
File "/usr/local/lib/python2.7/site-packages/twisted/conch/recvline.py", line 259, in handle_RETURN
self.lineReceived(line)
File "/home/cowrie/cowrie/cowrie/core/protocol.py", line 182, in lineReceived
self.cmdstack[-1].lineReceived(line)
File "/home/cowrie/cowrie/cowrie/core/honeypot.py", line 213, in lineReceived
self.runCommand()
File "/home/cowrie/cowrie/cowrie/core/honeypot.py", line 320, in runCommand
self.protocol.call_command(pp, cmdclass, *cmd_array[0]['rargs'])
File "/home/cowrie/cowrie/cowrie/core/protocol.py", line 330, in call_command
HoneyPotBaseProtocol.call_command(self, pp, cmd, *args)
File "/home/cowrie/cowrie/cowrie/core/protocol.py", line 192, in call_command
obj.start()
File "/home/cowrie/cowrie/cowrie/core/honeypot.py", line 84, in start
self.call()
File "/home/cowrie/cowrie/cowrie/commands/base.py", line 139, in call
self.write(escape_fn(' '.join(args)))
exceptions.ValueError: invalid \x escape
|
exceptions.ValueError
|
def __init__(self, ctx: DistributedContext, op, log_path: str):
self.ctx = ctx
self.op = op
self.log_path = log_path
self.file = open(log_path, "w")
self.stdout = sys.stdout
self.raw_stdout = self.stdout
while isinstance(self.raw_stdout, _LogWrapper):
self.raw_stdout = self.raw_stdout.stdout
# flag about registering log path
self.is_log_path_registered = False
|
def __init__(self, ctx: DistributedContext, op, log_path: str, custom_log_meta):
self.ctx = ctx
self.op = op
self.log_path = log_path
self.custom_log_meta = custom_log_meta
self.file = open(log_path, "w")
self.stdout = sys.stdout
# flag about registering log path
self.is_log_path_registered = False
|
https://github.com/mars-project/mars/issues/2021
|
2021-03-05 15:28:48,690 mars.scheduler.operands.common 335 ERROR Attempt 1: Unexpected error error occurred in executing operand 7518caeab11c4a18bcce34bc13a3cd0f in 11.28.217.38:28254
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.4.zip/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 649, in handle
return runner(results, op)
File "/home/admin/work/_public-pyodps-0.10.6.zip/odps/mars_extension/core.py", line 465, in wrapper
f(ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 128, in wrap
return func(cls, ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 1146, in wrapped
result = func(cls, ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/remote/core.py", line 218, in execute
result = function(*function_args, **function_kwargs)
File "<ipython-input-323-6fb7c5eb488f>", line 5, in make_feature_new
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/feature_engine.py", line 182, in get_feature_df
sub_feature_df = self.feature_map[i](feature_all[i])
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/feature_engine.py", line 74, in <lambda>
lambda x: VolumeFeatureWeibull(o, loader_table, config, x).volume_weibull()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 189, in volume_weibull
hl_res = self.volume_hl()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 151, in volume_hl
df = self.get_volume()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 115, in get_volume
fur_vol, _ = VolCalc(self.o, self.loader_table, self.config, self.config_feature).fur_volume()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/input_media.py", line 321, in fur_volume
fur_spending = self.get_fur_spending()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/input_media.py", line 273, in get_fur_spending
print('--- Get future spending ---')
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 97, in write
self.stdout.write(data)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 90, in write
self._register_log_path()
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 85, in _register_log_path
worker_addr, log_path)
File "mars/actors/core.pyx", line 65, in mars.actors.core.ActorRef.__getattr__._mt_call
return self.send((item,) + args + (kwargs,), wait=wait)
File "mars/actors/core.pyx", line 37, in mars.actors.core.ActorRef.send
return self._ctx.send(self, message, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 683, in mars.actors.pool.gevent_pool.ActorRemoteHelper.send
cpdef send(self, ActorRef actor_ref, object message, bint wait=True, object callback=None):
File "mars/actors/pool/gevent_pool.pyx", line 684, in mars.actors.pool.gevent_pool.ActorRemoteHelper.send
return self._send(actor_ref, message, wait_response=True, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 678, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send
return self._pool.apply(self._send_remote, (actor_ref.address, binaries))
File "/opt/conda/lib/python3.7/site-packages/gevent/pool.py", line 161, in apply
return self.spawn(func, *args, **kwds).get()
File "src/gevent/greenlet.py", line 795, in gevent._gevent_cgreenlet.Greenlet.get
File "src/gevent/greenlet.py", line 364, in gevent._gevent_cgreenlet.Greenlet._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 548, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
cpdef object _send_remote(self, str address, object binary):
File "mars/actors/pool/gevent_pool.pyx", line 552, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
with self._new_connection(address) as sock:
File "mars/actors/pool/gevent_pool.pyx", line 555, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
res_binary = read_remote_message(sock.recv)
File "mars/actors/pool/messages.pyx", line 847, in mars.actors.pool.messages.read_remote_message
read_bytes = _wrap_read_func(read_func, 8)
File "mars/actors/pool/messages.pyx", line 830, in mars.actors.pool.messages._wrap_read_func
read_bytes = read_func(size)
File "/opt/conda/lib/python3.7/site-packages/gevent/_socket3.py", line 454, in recv
self._wait(self._read_event)
File "src/gevent/_hub_primitives.py", line 317, in gevent._gevent_c_hub_primitives.wait_on_socket
File "src/gevent/_hub_primitives.py", line 322, in gevent._gevent_c_hub_primitives.wait_on_socket
File "src/gevent/_hub_primitives.py", line 304, in gevent._gevent_c_hub_primitives._primitive_wait
File "src/gevent/_hub_primitives.py", line 46, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_hub_primitives.py", line 46, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_hub_primitives.py", line 55, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_waiter.py", line 151, in gevent._gevent_c_waiter.Waiter.get
File "src/gevent/_greenlet_primitives.py", line 61, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_greenlet_primitives.py", line 61, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_greenlet_primitives.py", line 65, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_gevent_c_greenlet_primitives.pxd", line 35, in gevent._gevent_c_greenlet_primitives._greenlet_switch
greenlet.error: cannot switch to a different thread
|
greenlet.error
|
def _register_log_path(self):
if self.is_log_path_registered:
return
# register log path
session_id = self.ctx.session_id
tileable_op_key = self.op.tileable_op_key
chunk_op_key = self.op.key
worker_addr = self.ctx.get_local_address()
log_path = self.log_path
custom_log_meta = self.ctx.get_custom_log_meta_ref()
custom_log_meta.record_custom_log_path(
session_id, tileable_op_key, chunk_op_key, worker_addr, log_path
)
self.is_log_path_registered = True
|
def _register_log_path(self):
if self.is_log_path_registered:
return
# register log path
session_id = self.ctx.session_id
tileable_op_key = self.op.tileable_op_key
chunk_op_key = self.op.key
worker_addr = self.ctx.get_local_address()
log_path = self.log_path
self.custom_log_meta.record_custom_log_path(
session_id, tileable_op_key, chunk_op_key, worker_addr, log_path
)
self.is_log_path_registered = True
|
https://github.com/mars-project/mars/issues/2021
|
2021-03-05 15:28:48,690 mars.scheduler.operands.common 335 ERROR Attempt 1: Unexpected error error occurred in executing operand 7518caeab11c4a18bcce34bc13a3cd0f in 11.28.217.38:28254
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.4.zip/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 649, in handle
return runner(results, op)
File "/home/admin/work/_public-pyodps-0.10.6.zip/odps/mars_extension/core.py", line 465, in wrapper
f(ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 128, in wrap
return func(cls, ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 1146, in wrapped
result = func(cls, ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/remote/core.py", line 218, in execute
result = function(*function_args, **function_kwargs)
File "<ipython-input-323-6fb7c5eb488f>", line 5, in make_feature_new
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/feature_engine.py", line 182, in get_feature_df
sub_feature_df = self.feature_map[i](feature_all[i])
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/feature_engine.py", line 74, in <lambda>
lambda x: VolumeFeatureWeibull(o, loader_table, config, x).volume_weibull()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 189, in volume_weibull
hl_res = self.volume_hl()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 151, in volume_hl
df = self.get_volume()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 115, in get_volume
fur_vol, _ = VolCalc(self.o, self.loader_table, self.config, self.config_feature).fur_volume()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/input_media.py", line 321, in fur_volume
fur_spending = self.get_fur_spending()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/input_media.py", line 273, in get_fur_spending
print('--- Get future spending ---')
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 97, in write
self.stdout.write(data)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 90, in write
self._register_log_path()
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 85, in _register_log_path
worker_addr, log_path)
File "mars/actors/core.pyx", line 65, in mars.actors.core.ActorRef.__getattr__._mt_call
return self.send((item,) + args + (kwargs,), wait=wait)
File "mars/actors/core.pyx", line 37, in mars.actors.core.ActorRef.send
return self._ctx.send(self, message, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 683, in mars.actors.pool.gevent_pool.ActorRemoteHelper.send
cpdef send(self, ActorRef actor_ref, object message, bint wait=True, object callback=None):
File "mars/actors/pool/gevent_pool.pyx", line 684, in mars.actors.pool.gevent_pool.ActorRemoteHelper.send
return self._send(actor_ref, message, wait_response=True, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 678, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send
return self._pool.apply(self._send_remote, (actor_ref.address, binaries))
File "/opt/conda/lib/python3.7/site-packages/gevent/pool.py", line 161, in apply
return self.spawn(func, *args, **kwds).get()
File "src/gevent/greenlet.py", line 795, in gevent._gevent_cgreenlet.Greenlet.get
File "src/gevent/greenlet.py", line 364, in gevent._gevent_cgreenlet.Greenlet._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 548, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
cpdef object _send_remote(self, str address, object binary):
File "mars/actors/pool/gevent_pool.pyx", line 552, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
with self._new_connection(address) as sock:
File "mars/actors/pool/gevent_pool.pyx", line 555, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
res_binary = read_remote_message(sock.recv)
File "mars/actors/pool/messages.pyx", line 847, in mars.actors.pool.messages.read_remote_message
read_bytes = _wrap_read_func(read_func, 8)
File "mars/actors/pool/messages.pyx", line 830, in mars.actors.pool.messages._wrap_read_func
read_bytes = read_func(size)
File "/opt/conda/lib/python3.7/site-packages/gevent/_socket3.py", line 454, in recv
self._wait(self._read_event)
File "src/gevent/_hub_primitives.py", line 317, in gevent._gevent_c_hub_primitives.wait_on_socket
File "src/gevent/_hub_primitives.py", line 322, in gevent._gevent_c_hub_primitives.wait_on_socket
File "src/gevent/_hub_primitives.py", line 304, in gevent._gevent_c_hub_primitives._primitive_wait
File "src/gevent/_hub_primitives.py", line 46, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_hub_primitives.py", line 46, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_hub_primitives.py", line 55, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_waiter.py", line 151, in gevent._gevent_c_waiter.Waiter.get
File "src/gevent/_greenlet_primitives.py", line 61, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_greenlet_primitives.py", line 61, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_greenlet_primitives.py", line 65, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_gevent_c_greenlet_primitives.pxd", line 35, in gevent._gevent_c_greenlet_primitives._greenlet_switch
greenlet.error: cannot switch to a different thread
|
greenlet.error
|
def write(self, data):
self._register_log_path()
# write into file
self.file.write(data)
# force flush to make sure `fetch_log` can get stdout in time
self.file.flush()
# write into previous stdout
self.raw_stdout.write(data)
|
def write(self, data):
self._register_log_path()
# write into file
self.file.write(data)
# force flush to make sure `fetch_log` can get stdout in time
self.file.flush()
# write into previous stdout
self.stdout.write(data)
|
https://github.com/mars-project/mars/issues/2021
|
2021-03-05 15:28:48,690 mars.scheduler.operands.common 335 ERROR Attempt 1: Unexpected error error occurred in executing operand 7518caeab11c4a18bcce34bc13a3cd0f in 11.28.217.38:28254
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.4.zip/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 649, in handle
return runner(results, op)
File "/home/admin/work/_public-pyodps-0.10.6.zip/odps/mars_extension/core.py", line 465, in wrapper
f(ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 128, in wrap
return func(cls, ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 1146, in wrapped
result = func(cls, ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/remote/core.py", line 218, in execute
result = function(*function_args, **function_kwargs)
File "<ipython-input-323-6fb7c5eb488f>", line 5, in make_feature_new
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/feature_engine.py", line 182, in get_feature_df
sub_feature_df = self.feature_map[i](feature_all[i])
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/feature_engine.py", line 74, in <lambda>
lambda x: VolumeFeatureWeibull(o, loader_table, config, x).volume_weibull()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 189, in volume_weibull
hl_res = self.volume_hl()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 151, in volume_hl
df = self.get_volume()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 115, in get_volume
fur_vol, _ = VolCalc(self.o, self.loader_table, self.config, self.config_feature).fur_volume()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/input_media.py", line 321, in fur_volume
fur_spending = self.get_fur_spending()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/input_media.py", line 273, in get_fur_spending
print('--- Get future spending ---')
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 97, in write
self.stdout.write(data)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 90, in write
self._register_log_path()
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 85, in _register_log_path
worker_addr, log_path)
File "mars/actors/core.pyx", line 65, in mars.actors.core.ActorRef.__getattr__._mt_call
return self.send((item,) + args + (kwargs,), wait=wait)
File "mars/actors/core.pyx", line 37, in mars.actors.core.ActorRef.send
return self._ctx.send(self, message, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 683, in mars.actors.pool.gevent_pool.ActorRemoteHelper.send
cpdef send(self, ActorRef actor_ref, object message, bint wait=True, object callback=None):
File "mars/actors/pool/gevent_pool.pyx", line 684, in mars.actors.pool.gevent_pool.ActorRemoteHelper.send
return self._send(actor_ref, message, wait_response=True, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 678, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send
return self._pool.apply(self._send_remote, (actor_ref.address, binaries))
File "/opt/conda/lib/python3.7/site-packages/gevent/pool.py", line 161, in apply
return self.spawn(func, *args, **kwds).get()
File "src/gevent/greenlet.py", line 795, in gevent._gevent_cgreenlet.Greenlet.get
File "src/gevent/greenlet.py", line 364, in gevent._gevent_cgreenlet.Greenlet._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 548, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
cpdef object _send_remote(self, str address, object binary):
File "mars/actors/pool/gevent_pool.pyx", line 552, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
with self._new_connection(address) as sock:
File "mars/actors/pool/gevent_pool.pyx", line 555, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
res_binary = read_remote_message(sock.recv)
File "mars/actors/pool/messages.pyx", line 847, in mars.actors.pool.messages.read_remote_message
read_bytes = _wrap_read_func(read_func, 8)
File "mars/actors/pool/messages.pyx", line 830, in mars.actors.pool.messages._wrap_read_func
read_bytes = read_func(size)
File "/opt/conda/lib/python3.7/site-packages/gevent/_socket3.py", line 454, in recv
self._wait(self._read_event)
File "src/gevent/_hub_primitives.py", line 317, in gevent._gevent_c_hub_primitives.wait_on_socket
File "src/gevent/_hub_primitives.py", line 322, in gevent._gevent_c_hub_primitives.wait_on_socket
File "src/gevent/_hub_primitives.py", line 304, in gevent._gevent_c_hub_primitives._primitive_wait
File "src/gevent/_hub_primitives.py", line 46, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_hub_primitives.py", line 46, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_hub_primitives.py", line 55, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_waiter.py", line 151, in gevent._gevent_c_waiter.Waiter.get
File "src/gevent/_greenlet_primitives.py", line 61, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_greenlet_primitives.py", line 61, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_greenlet_primitives.py", line 65, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_gevent_c_greenlet_primitives.pxd", line 35, in gevent._gevent_c_greenlet_primitives._greenlet_switch
greenlet.error: cannot switch to a different thread
|
greenlet.error
|
def flush(self):
self.raw_stdout.flush()
|
def flush(self):
self.stdout.flush()
|
https://github.com/mars-project/mars/issues/2021
|
2021-03-05 15:28:48,690 mars.scheduler.operands.common 335 ERROR Attempt 1: Unexpected error error occurred in executing operand 7518caeab11c4a18bcce34bc13a3cd0f in 11.28.217.38:28254
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.4.zip/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 649, in handle
return runner(results, op)
File "/home/admin/work/_public-pyodps-0.10.6.zip/odps/mars_extension/core.py", line 465, in wrapper
f(ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 128, in wrap
return func(cls, ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 1146, in wrapped
result = func(cls, ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/remote/core.py", line 218, in execute
result = function(*function_args, **function_kwargs)
File "<ipython-input-323-6fb7c5eb488f>", line 5, in make_feature_new
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/feature_engine.py", line 182, in get_feature_df
sub_feature_df = self.feature_map[i](feature_all[i])
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/feature_engine.py", line 74, in <lambda>
lambda x: VolumeFeatureWeibull(o, loader_table, config, x).volume_weibull()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 189, in volume_weibull
hl_res = self.volume_hl()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 151, in volume_hl
df = self.get_volume()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 115, in get_volume
fur_vol, _ = VolCalc(self.o, self.loader_table, self.config, self.config_feature).fur_volume()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/input_media.py", line 321, in fur_volume
fur_spending = self.get_fur_spending()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/input_media.py", line 273, in get_fur_spending
print('--- Get future spending ---')
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 97, in write
self.stdout.write(data)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 90, in write
self._register_log_path()
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 85, in _register_log_path
worker_addr, log_path)
File "mars/actors/core.pyx", line 65, in mars.actors.core.ActorRef.__getattr__._mt_call
return self.send((item,) + args + (kwargs,), wait=wait)
File "mars/actors/core.pyx", line 37, in mars.actors.core.ActorRef.send
return self._ctx.send(self, message, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 683, in mars.actors.pool.gevent_pool.ActorRemoteHelper.send
cpdef send(self, ActorRef actor_ref, object message, bint wait=True, object callback=None):
File "mars/actors/pool/gevent_pool.pyx", line 684, in mars.actors.pool.gevent_pool.ActorRemoteHelper.send
return self._send(actor_ref, message, wait_response=True, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 678, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send
return self._pool.apply(self._send_remote, (actor_ref.address, binaries))
File "/opt/conda/lib/python3.7/site-packages/gevent/pool.py", line 161, in apply
return self.spawn(func, *args, **kwds).get()
File "src/gevent/greenlet.py", line 795, in gevent._gevent_cgreenlet.Greenlet.get
File "src/gevent/greenlet.py", line 364, in gevent._gevent_cgreenlet.Greenlet._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 548, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
cpdef object _send_remote(self, str address, object binary):
File "mars/actors/pool/gevent_pool.pyx", line 552, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
with self._new_connection(address) as sock:
File "mars/actors/pool/gevent_pool.pyx", line 555, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
res_binary = read_remote_message(sock.recv)
File "mars/actors/pool/messages.pyx", line 847, in mars.actors.pool.messages.read_remote_message
read_bytes = _wrap_read_func(read_func, 8)
File "mars/actors/pool/messages.pyx", line 830, in mars.actors.pool.messages._wrap_read_func
read_bytes = read_func(size)
File "/opt/conda/lib/python3.7/site-packages/gevent/_socket3.py", line 454, in recv
self._wait(self._read_event)
File "src/gevent/_hub_primitives.py", line 317, in gevent._gevent_c_hub_primitives.wait_on_socket
File "src/gevent/_hub_primitives.py", line 322, in gevent._gevent_c_hub_primitives.wait_on_socket
File "src/gevent/_hub_primitives.py", line 304, in gevent._gevent_c_hub_primitives._primitive_wait
File "src/gevent/_hub_primitives.py", line 46, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_hub_primitives.py", line 46, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_hub_primitives.py", line 55, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_waiter.py", line 151, in gevent._gevent_c_waiter.Waiter.get
File "src/gevent/_greenlet_primitives.py", line 61, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_greenlet_primitives.py", line 61, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_greenlet_primitives.py", line 65, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_gevent_c_greenlet_primitives.pxd", line 35, in gevent._gevent_c_greenlet_primitives._greenlet_switch
greenlet.error: cannot switch to a different thread
|
greenlet.error
|
def redirect_custom_log(func):
"""
Redirect stdout to a file by wrapping ``Operand.execute(ctx, op)``
"""
@functools.wraps(func)
def wrap(cls, ctx: DistributedContext, op):
# import inside, or Ray backend may fail
from .config import options
if (
getattr(ctx, "running_mode", RunningMode.local) == RunningMode.local
or options.custom_log_dir is None
):
# do nothing for local scheduler
return func(cls, ctx, op)
log_path = gen_log_path(ctx.session_id, op.key)
with _LogWrapper(ctx, op, log_path):
return func(cls, ctx, op)
return wrap
|
def redirect_custom_log(func):
"""
Redirect stdout to a file by wrapping ``Operand.execute(ctx, op)``
"""
@functools.wraps(func)
def wrap(cls, ctx: DistributedContext, op):
# import inside, or Ray backend may fail
from .config import options
if (
getattr(ctx, "running_mode", RunningMode.local) == RunningMode.local
or options.custom_log_dir is None
):
# do nothing for local scheduler
return func(cls, ctx, op)
custom_log_meta = ctx.get_custom_log_meta_ref()
log_path = gen_log_path(ctx.session_id, op.key)
with _LogWrapper(ctx, op, log_path, custom_log_meta):
return func(cls, ctx, op)
return wrap
|
https://github.com/mars-project/mars/issues/2021
|
2021-03-05 15:28:48,690 mars.scheduler.operands.common 335 ERROR Attempt 1: Unexpected error error occurred in executing operand 7518caeab11c4a18bcce34bc13a3cd0f in 11.28.217.38:28254
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.4.zip/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 649, in handle
return runner(results, op)
File "/home/admin/work/_public-pyodps-0.10.6.zip/odps/mars_extension/core.py", line 465, in wrapper
f(ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 128, in wrap
return func(cls, ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 1146, in wrapped
result = func(cls, ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/remote/core.py", line 218, in execute
result = function(*function_args, **function_kwargs)
File "<ipython-input-323-6fb7c5eb488f>", line 5, in make_feature_new
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/feature_engine.py", line 182, in get_feature_df
sub_feature_df = self.feature_map[i](feature_all[i])
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/feature_engine.py", line 74, in <lambda>
lambda x: VolumeFeatureWeibull(o, loader_table, config, x).volume_weibull()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 189, in volume_weibull
hl_res = self.volume_hl()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 151, in volume_hl
df = self.get_volume()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 115, in get_volume
fur_vol, _ = VolCalc(self.o, self.loader_table, self.config, self.config_feature).fur_volume()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/input_media.py", line 321, in fur_volume
fur_spending = self.get_fur_spending()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/input_media.py", line 273, in get_fur_spending
print('--- Get future spending ---')
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 97, in write
self.stdout.write(data)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 90, in write
self._register_log_path()
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 85, in _register_log_path
worker_addr, log_path)
File "mars/actors/core.pyx", line 65, in mars.actors.core.ActorRef.__getattr__._mt_call
return self.send((item,) + args + (kwargs,), wait=wait)
File "mars/actors/core.pyx", line 37, in mars.actors.core.ActorRef.send
return self._ctx.send(self, message, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 683, in mars.actors.pool.gevent_pool.ActorRemoteHelper.send
cpdef send(self, ActorRef actor_ref, object message, bint wait=True, object callback=None):
File "mars/actors/pool/gevent_pool.pyx", line 684, in mars.actors.pool.gevent_pool.ActorRemoteHelper.send
return self._send(actor_ref, message, wait_response=True, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 678, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send
return self._pool.apply(self._send_remote, (actor_ref.address, binaries))
File "/opt/conda/lib/python3.7/site-packages/gevent/pool.py", line 161, in apply
return self.spawn(func, *args, **kwds).get()
File "src/gevent/greenlet.py", line 795, in gevent._gevent_cgreenlet.Greenlet.get
File "src/gevent/greenlet.py", line 364, in gevent._gevent_cgreenlet.Greenlet._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 548, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
cpdef object _send_remote(self, str address, object binary):
File "mars/actors/pool/gevent_pool.pyx", line 552, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
with self._new_connection(address) as sock:
File "mars/actors/pool/gevent_pool.pyx", line 555, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
res_binary = read_remote_message(sock.recv)
File "mars/actors/pool/messages.pyx", line 847, in mars.actors.pool.messages.read_remote_message
read_bytes = _wrap_read_func(read_func, 8)
File "mars/actors/pool/messages.pyx", line 830, in mars.actors.pool.messages._wrap_read_func
read_bytes = read_func(size)
File "/opt/conda/lib/python3.7/site-packages/gevent/_socket3.py", line 454, in recv
self._wait(self._read_event)
File "src/gevent/_hub_primitives.py", line 317, in gevent._gevent_c_hub_primitives.wait_on_socket
File "src/gevent/_hub_primitives.py", line 322, in gevent._gevent_c_hub_primitives.wait_on_socket
File "src/gevent/_hub_primitives.py", line 304, in gevent._gevent_c_hub_primitives._primitive_wait
File "src/gevent/_hub_primitives.py", line 46, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_hub_primitives.py", line 46, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_hub_primitives.py", line 55, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_waiter.py", line 151, in gevent._gevent_c_waiter.Waiter.get
File "src/gevent/_greenlet_primitives.py", line 61, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_greenlet_primitives.py", line 61, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_greenlet_primitives.py", line 65, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_gevent_c_greenlet_primitives.pxd", line 35, in gevent._gevent_c_greenlet_primitives._greenlet_switch
greenlet.error: cannot switch to a different thread
|
greenlet.error
|
def wrap(cls, ctx: DistributedContext, op):
# import inside, or Ray backend may fail
from .config import options
if (
getattr(ctx, "running_mode", RunningMode.local) == RunningMode.local
or options.custom_log_dir is None
):
# do nothing for local scheduler
return func(cls, ctx, op)
log_path = gen_log_path(ctx.session_id, op.key)
with _LogWrapper(ctx, op, log_path):
return func(cls, ctx, op)
|
def wrap(cls, ctx: DistributedContext, op):
# import inside, or Ray backend may fail
from .config import options
if (
getattr(ctx, "running_mode", RunningMode.local) == RunningMode.local
or options.custom_log_dir is None
):
# do nothing for local scheduler
return func(cls, ctx, op)
custom_log_meta = ctx.get_custom_log_meta_ref()
log_path = gen_log_path(ctx.session_id, op.key)
with _LogWrapper(ctx, op, log_path, custom_log_meta):
return func(cls, ctx, op)
|
https://github.com/mars-project/mars/issues/2021
|
2021-03-05 15:28:48,690 mars.scheduler.operands.common 335 ERROR Attempt 1: Unexpected error error occurred in executing operand 7518caeab11c4a18bcce34bc13a3cd0f in 11.28.217.38:28254
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.4.zip/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/executor.py", line 649, in handle
return runner(results, op)
File "/home/admin/work/_public-pyodps-0.10.6.zip/odps/mars_extension/core.py", line 465, in wrapper
f(ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 128, in wrap
return func(cls, ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/utils.py", line 1146, in wrapped
result = func(cls, ctx, op)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/remote/core.py", line 218, in execute
result = function(*function_args, **function_kwargs)
File "<ipython-input-323-6fb7c5eb488f>", line 5, in make_feature_new
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/feature_engine.py", line 182, in get_feature_df
sub_feature_df = self.feature_map[i](feature_all[i])
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/feature_engine.py", line 74, in <lambda>
lambda x: VolumeFeatureWeibull(o, loader_table, config, x).volume_weibull()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 189, in volume_weibull
hl_res = self.volume_hl()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 151, in volume_hl
df = self.get_volume()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/traffic_weibull.py", line 115, in get_volume
fur_vol, _ = VolCalc(self.o, self.loader_table, self.config, self.config_feature).fur_volume()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/input_media.py", line 321, in fur_volume
fur_spending = self.get_fur_spending()
File "/opt/conda/lib/python3.7/site-packages/autoforecast/modules/feature_engineer/input_media.py", line 273, in get_fur_spending
print('--- Get future spending ---')
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 97, in write
self.stdout.write(data)
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 90, in write
self._register_log_path()
File "/home/admin/work/_public-mars-0.6.4.zip/mars/custom_log.py", line 85, in _register_log_path
worker_addr, log_path)
File "mars/actors/core.pyx", line 65, in mars.actors.core.ActorRef.__getattr__._mt_call
return self.send((item,) + args + (kwargs,), wait=wait)
File "mars/actors/core.pyx", line 37, in mars.actors.core.ActorRef.send
return self._ctx.send(self, message, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 683, in mars.actors.pool.gevent_pool.ActorRemoteHelper.send
cpdef send(self, ActorRef actor_ref, object message, bint wait=True, object callback=None):
File "mars/actors/pool/gevent_pool.pyx", line 684, in mars.actors.pool.gevent_pool.ActorRemoteHelper.send
return self._send(actor_ref, message, wait_response=True, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 678, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send
return self._pool.apply(self._send_remote, (actor_ref.address, binaries))
File "/opt/conda/lib/python3.7/site-packages/gevent/pool.py", line 161, in apply
return self.spawn(func, *args, **kwds).get()
File "src/gevent/greenlet.py", line 795, in gevent._gevent_cgreenlet.Greenlet.get
File "src/gevent/greenlet.py", line 364, in gevent._gevent_cgreenlet.Greenlet._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 548, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
cpdef object _send_remote(self, str address, object binary):
File "mars/actors/pool/gevent_pool.pyx", line 552, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
with self._new_connection(address) as sock:
File "mars/actors/pool/gevent_pool.pyx", line 555, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
res_binary = read_remote_message(sock.recv)
File "mars/actors/pool/messages.pyx", line 847, in mars.actors.pool.messages.read_remote_message
read_bytes = _wrap_read_func(read_func, 8)
File "mars/actors/pool/messages.pyx", line 830, in mars.actors.pool.messages._wrap_read_func
read_bytes = read_func(size)
File "/opt/conda/lib/python3.7/site-packages/gevent/_socket3.py", line 454, in recv
self._wait(self._read_event)
File "src/gevent/_hub_primitives.py", line 317, in gevent._gevent_c_hub_primitives.wait_on_socket
File "src/gevent/_hub_primitives.py", line 322, in gevent._gevent_c_hub_primitives.wait_on_socket
File "src/gevent/_hub_primitives.py", line 304, in gevent._gevent_c_hub_primitives._primitive_wait
File "src/gevent/_hub_primitives.py", line 46, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_hub_primitives.py", line 46, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_hub_primitives.py", line 55, in gevent._gevent_c_hub_primitives.WaitOperationsGreenlet.wait
File "src/gevent/_waiter.py", line 151, in gevent._gevent_c_waiter.Waiter.get
File "src/gevent/_greenlet_primitives.py", line 61, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_greenlet_primitives.py", line 61, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_greenlet_primitives.py", line 65, in gevent._gevent_c_greenlet_primitives.SwitchOutGreenletWithLoop.switch
File "src/gevent/_gevent_c_greenlet_primitives.pxd", line 35, in gevent._gevent_c_greenlet_primitives._greenlet_switch
greenlet.error: cannot switch to a different thread
|
greenlet.error
|
def __call__(self, input_tensor, index, columns):
if isinstance(input_tensor, dict):
return self._call_input_1d_tileables(input_tensor, index, columns)
elif input_tensor is not None:
return self._call_input_tensor(input_tensor, index, columns)
else:
return self._call_tensor_none(index, columns)
|
def __call__(self, input_tensor, index, columns):
if isinstance(input_tensor, dict):
return self._call_input_1d_tileables(input_tensor, index, columns)
else:
return self._call_input_tensor(input_tensor, index, columns)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def _call_input_1d_tileables(self, input_1d_tileables, index, columns):
tileables = []
shape = None
for tileable in input_1d_tileables.values():
tileable_shape = astensor(tileable).shape
if len(tileable_shape) > 0:
if shape is None:
shape = tileable_shape
elif shape != tileable_shape:
raise ValueError("input 1-d tensors should have same shape")
if isinstance(tileable, (Base, Entity)):
tileables.append(tileable)
if index is not None:
tileable_size = tileables[0].shape[0]
if hasattr(index, "shape"):
index_size = index.shape[0]
else:
index_size = len(index)
if (
not pd.isna(tileable_size)
and not pd.isna(index_size)
and tileable_size != index_size
):
raise ValueError(
f"index {index} should have the same shape with tensor: {tileable_size}"
)
index_value = self._process_index(index, tileables)
else:
index_value = parse_index(pd.RangeIndex(0, tileables[0].shape[0]))
if columns is not None:
if len(input_1d_tileables) != len(columns):
raise ValueError(
f"columns {columns} should have size {len(input_1d_tileables)}"
)
if not isinstance(columns, pd.Index):
if isinstance(columns, Base):
raise NotImplementedError("The columns value cannot be a tileable")
columns = pd.Index(columns)
columns_value = parse_index(columns, store_data=True)
else:
columns_value = parse_index(
pd.RangeIndex(0, len(input_1d_tileables)), store_data=True
)
shape = (shape[0], len(input_1d_tileables))
return self.new_dataframe(
tileables,
shape,
dtypes=self.dtypes,
index_value=index_value,
columns_value=columns_value,
)
|
def _call_input_1d_tileables(self, input_1d_tileables, index, columns):
tileables = []
shape = None
for tileable in input_1d_tileables.values():
tileable_shape = astensor(tileable).shape
if len(tileable_shape) > 0:
if shape is None:
shape = tileable_shape
elif shape != tileable_shape:
raise ValueError("input 1-d tensors should have same shape")
if isinstance(tileable, (Base, Entity)):
tileables.append(tileable)
if index is not None:
if tileables[0].shape[0] != len(index):
raise ValueError(
f"index {index} should have the same shape with tensor: {input_1d_tileables[0].shape[0]}"
)
index_value = self._process_index(index, tileables)
else:
index_value = parse_index(pd.RangeIndex(0, tileables[0].shape[0]))
if columns is not None:
if len(input_1d_tileables) != len(columns):
raise ValueError(
f"columns {columns} should have size {len(input_1d_tileables)}"
)
if not isinstance(columns, pd.Index):
if isinstance(columns, Base):
raise NotImplementedError("The columns value cannot be a tileable")
columns = pd.Index(columns)
columns_value = parse_index(columns, store_data=True)
else:
columns_value = parse_index(
pd.RangeIndex(0, len(input_1d_tileables)), store_data=True
)
shape = (shape[0], len(input_1d_tileables))
return self.new_dataframe(
tileables,
shape,
dtypes=self.dtypes,
index_value=index_value,
columns_value=columns_value,
)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def tile(cls, op):
if isinstance(op.input, dict):
return cls._tile_input_1d_tileables(op)
elif op.input is not None:
return cls._tile_input_tensor(op)
else:
return cls._tile_tensor_none(op)
|
def tile(cls, op):
if isinstance(op.input, dict):
return cls._tile_input_1d_tileables(op)
else:
return cls._tile_input_tensor(op)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def execute(cls, ctx, op):
chunk = op.outputs[0]
if isinstance(op.input, dict):
d = OrderedDict()
for k, v in op.input.items():
if hasattr(v, "key"):
d[k] = ctx[v.key]
else:
d[k] = v
if op.index is not None:
index_data = ctx[op.index.key]
else:
index_data = chunk.index_value.to_pandas()
ctx[chunk.key] = pd.DataFrame(
d, index=index_data, columns=chunk.columns_value.to_pandas()
)
elif op.input is not None:
tensor_data = ctx[op.inputs[0].key]
if isinstance(tensor_data, pd.Series):
ctx[chunk.key] = tensor_data.to_frame(name=chunk.dtypes.index[0])
else:
if op.index is not None:
# index is a tensor
index_data = ctx[op.inputs[1].key]
else:
index_data = chunk.index_value.to_pandas()
if isinstance(index_data, pd.RangeIndex) and len(index_data) == 0:
index_data = None
ctx[chunk.key] = pd.DataFrame(
tensor_data, index=index_data, columns=chunk.columns_value.to_pandas()
)
else:
index_data = ctx[op.index.key]
ctx[chunk.key] = pd.DataFrame(
index=index_data, columns=chunk.columns_value.to_pandas()
)
|
def execute(cls, ctx, op):
chunk = op.outputs[0]
if isinstance(op.input, dict):
d = OrderedDict()
for k, v in op.input.items():
if hasattr(v, "key"):
d[k] = ctx[v.key]
else:
d[k] = v
if op.index is not None:
index_data = ctx[op.index.key]
else:
index_data = chunk.index_value.to_pandas()
ctx[chunk.key] = pd.DataFrame(
d, index=index_data, columns=chunk.columns_value.to_pandas()
)
else:
tensor_data = ctx[op.inputs[0].key]
if isinstance(tensor_data, pd.Series):
ctx[chunk.key] = tensor_data.to_frame(name=chunk.dtypes.index[0])
else:
if op.index is not None:
# index is a tensor
index_data = ctx[op.inputs[1].key]
else:
index_data = chunk.index_value.to_pandas()
if isinstance(index_data, pd.RangeIndex) and len(index_data) == 0:
index_data = None
ctx[chunk.key] = pd.DataFrame(
tensor_data, index=index_data, columns=chunk.columns_value.to_pandas()
)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def dataframe_from_tensor(tensor, index=None, columns=None, gpu=None, sparse=False):
if tensor is not None:
if tensor.ndim > 2 or tensor.ndim <= 0:
raise TypeError(
f"Not support create DataFrame from {tensor.ndim} dims tensor"
)
try:
col_num = tensor.shape[1]
except IndexError:
col_num = 1
gpu = tensor.op.gpu if gpu is None else gpu
dtypes = pd.Series([tensor.dtype] * col_num, index=columns)
else:
gpu = None
if columns is not None:
dtypes = pd.Series([], index=columns)
else:
dtypes = pd.Series([], index=pd.Index([], dtype=object))
op = DataFrameFromTensor(input_=tensor, dtypes=dtypes, gpu=gpu, sparse=sparse)
return op(tensor, index, columns)
|
def dataframe_from_tensor(tensor, index=None, columns=None, gpu=None, sparse=False):
if tensor.ndim > 2 or tensor.ndim <= 0:
raise TypeError(f"Not support create DataFrame from {tensor.ndim} dims tensor")
try:
col_num = tensor.shape[1]
except IndexError:
col_num = 1
gpu = tensor.op.gpu if gpu is None else gpu
op = DataFrameFromTensor(
input_=tensor,
dtypes=pd.Series([tensor.dtype] * col_num, index=columns),
gpu=gpu,
sparse=sparse,
)
return op(tensor, index, columns)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def __init__(self, input_=None, index=None, dtype=None, gpu=None, sparse=None, **kw):
super().__init__(
_input=input_,
_index=index,
_dtype=dtype,
_gpu=gpu,
_sparse=sparse,
_output_types=[OutputType.series],
**kw,
)
|
def __init__(self, index=None, dtype=None, gpu=None, sparse=None, **kw):
super().__init__(
_index=index,
_dtype=dtype,
_gpu=gpu,
_sparse=sparse,
_output_types=[OutputType.series],
**kw,
)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
if self._input is not None:
self._input = self._inputs[0]
if self._index is not None:
self._index = self._inputs[-1]
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
self._input = self._inputs[0]
if self._index is not None:
self._index = self._inputs[-1]
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def tile(cls, op):
if op.index is None:
# check all inputs to make sure no unknown chunk shape
check_chunks_unknown_shape(op.inputs, TilesError)
if op.input is None:
return cls._tile_tensor_none(op)
out_series = op.outputs[0]
in_tensor = op.inputs[0]
nsplits = in_tensor.nsplits
if op.index is not None:
index_tensor = op.index.rechunk([nsplits[0]])._inplace_tile()
else:
index_tensor = None
index_start = 0
out_chunks = []
series_index = out_series.index_value.to_pandas()
for in_chunk in in_tensor.chunks:
new_op = op.copy().reset_key()
new_op.extra_params["index_start"] = index_start
chunk_inputs = [in_chunk]
if index_tensor is not None:
index_chunk = index_tensor.cix[in_chunk.index]
chunk_inputs.append(index_chunk)
if isinstance(op.index, INDEX_TYPE):
index_value = index_chunk.index_value
else:
index_value = parse_index(
pd.Index([], dtype=in_chunk.dtype),
index_chunk,
type(new_op).__name__,
)
else:
chunk_pd_index = series_index[index_start : index_start + in_chunk.shape[0]]
index_value = parse_index(chunk_pd_index, store_data=True)
index_start += in_chunk.shape[0]
out_chunk = new_op.new_chunk(
chunk_inputs,
shape=in_chunk.shape,
index=in_chunk.index,
index_value=index_value,
name=out_series.name,
dtype=out_series.dtype,
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tileables(
op.inputs,
shape=out_series.shape,
dtype=out_series.dtype,
index_value=out_series.index_value,
name=out_series.name,
chunks=out_chunks,
nsplits=in_tensor.nsplits,
)
|
def tile(cls, op):
if op.index is None:
# check all inputs to make sure no unknown chunk shape
check_chunks_unknown_shape(op.inputs, TilesError)
out_series = op.outputs[0]
in_tensor = op.inputs[0]
nsplits = in_tensor.nsplits
if op.index is not None:
index_tensor = op.index.rechunk([nsplits[0]])._inplace_tile()
else:
index_tensor = None
index_start = 0
out_chunks = []
series_index = out_series.index_value.to_pandas()
for in_chunk in in_tensor.chunks:
new_op = op.copy().reset_key()
new_op.extra_params["index_start"] = index_start
chunk_inputs = [in_chunk]
if index_tensor is not None:
index_chunk = index_tensor.cix[in_chunk.index]
chunk_inputs.append(index_chunk)
if isinstance(op.index, INDEX_TYPE):
index_value = index_chunk.index_value
else:
index_value = parse_index(
pd.Index([], dtype=in_chunk.dtype),
index_chunk,
type(new_op).__name__,
)
else:
chunk_pd_index = series_index[index_start : index_start + in_chunk.shape[0]]
index_value = parse_index(chunk_pd_index, store_data=True)
index_start += in_chunk.shape[0]
out_chunk = new_op.new_chunk(
chunk_inputs,
shape=in_chunk.shape,
index=in_chunk.index,
index_value=index_value,
name=out_series.name,
dtype=out_series.dtype,
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tileables(
op.inputs,
shape=out_series.shape,
dtype=out_series.dtype,
index_value=out_series.index_value,
name=out_series.name,
chunks=out_chunks,
nsplits=in_tensor.nsplits,
)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def execute(cls, ctx, op):
chunk = op.outputs[0]
if op.input is not None:
tensor_data = ctx[op.input.key]
else:
tensor_data = None
if op.index is not None:
index_data = ctx[op.index.key]
else:
index_data = chunk.index_value.to_pandas()
ctx[chunk.key] = pd.Series(
tensor_data, index=index_data, name=chunk.name, dtype=chunk.dtype
)
|
def execute(cls, ctx, op):
chunk = op.outputs[0]
tensor_data = ctx[op.inputs[0].key]
if op.index is not None:
index_data = ctx[op.inputs[1].key]
else:
index_data = chunk.index_value.to_pandas()
ctx[chunk.key] = pd.Series(tensor_data, index=index_data, name=chunk.name)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def __call__(self, input_tensor, index, name):
inputs = [input_tensor] if input_tensor is not None else []
if index is not None:
if not isinstance(index, pd.Index):
if isinstance(index, INDEX_TYPE):
self._index = index
index_value = index.index_value
inputs.append(index)
elif isinstance(index, (Base, Entity)):
self._index = index
index = astensor(index)
if index.ndim != 1:
raise ValueError(f"index should be 1-d, got {index.ndim}-d")
index_value = parse_index(
pd.Index([], dtype=index.dtype), index, type(self).__name__
)
inputs.append(index)
else:
index = pd.Index(index)
index_value = parse_index(index, store_data=True)
else:
index_value = parse_index(index, store_data=True)
elif input_tensor is not None:
index_value = parse_index(pd.RangeIndex(start=0, stop=input_tensor.shape[0]))
else:
index_value = parse_index(pd.Index([], dtype=object))
if input_tensor is not None:
shape = input_tensor.shape
elif index is not None:
shape = index.shape
else:
shape = (0,)
return self.new_series(
inputs, shape=shape, dtype=self.dtype, index_value=index_value, name=name
)
|
def __call__(self, input_tensor, index, name):
inputs = [input_tensor]
if index is not None:
if not isinstance(index, pd.Index):
if isinstance(index, INDEX_TYPE):
self._index = index
index_value = index.index_value
inputs.append(index)
elif isinstance(index, (Base, Entity)):
self._index = index
index = astensor(index)
if index.ndim != 1:
raise ValueError(f"index should be 1-d, got {index.ndim}-d")
index_value = parse_index(
pd.Index([], dtype=index.dtype), index, type(self).__name__
)
inputs.append(index)
else:
index = pd.Index(index)
index_value = parse_index(index, store_data=True)
else:
index_value = parse_index(index, store_data=True)
else:
index_value = parse_index(pd.RangeIndex(start=0, stop=input_tensor.shape[0]))
return self.new_series(
inputs,
shape=input_tensor.shape,
dtype=self.dtype,
index_value=index_value,
name=name,
)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def series_from_tensor(
tensor, index=None, name=None, dtype=None, gpu=None, sparse=False
):
if tensor is not None:
if tensor.ndim > 1 or tensor.ndim <= 0:
raise TypeError(f"Not support create Series from {tensor.ndim} dims tensor")
gpu = tensor.op.gpu if gpu is None else gpu
dtype = dtype or tensor.dtype
else:
gpu = None
dtype = dtype or np.dtype(float)
op = SeriesFromTensor(input_=tensor, dtype=dtype, gpu=gpu, sparse=sparse)
return op(tensor, index, name)
|
def series_from_tensor(tensor, index=None, name=None, gpu=None, sparse=False):
if tensor.ndim > 1 or tensor.ndim <= 0:
raise TypeError(f"Not support create Series from {tensor.ndim} dims tensor")
gpu = tensor.op.gpu if gpu is None else gpu
op = SeriesFromTensor(dtype=tensor.dtype, gpu=gpu, sparse=sparse)
return op(tensor, index, name)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def __call__(self, df_or_series):
inputs = [df_or_series]
shape = list(df_or_series.shape)
index_value = df_or_series.index_value
columns_value = dtypes = None
if df_or_series.ndim == 2:
columns_value = df_or_series.columns_value
dtypes = df_or_series.dtypes
if self._index is not None:
shape[0] = self._index.shape[0]
index_value = asindex(self._index).index_value
self._index = astensor(self._index)
if isinstance(self._index, (Base, Entity)):
inputs.append(self._index)
if self._columns is not None:
shape[1] = self._columns.shape[0]
dtypes = df_or_series.dtypes.reindex(index=self._columns).fillna(
np.dtype(np.float64)
)
columns_value = parse_index(dtypes.index, store_data=True)
if self._fill_value is not None and isinstance(self._fill_value, (Base, Entity)):
inputs.append(self._fill_value)
if df_or_series.ndim == 1:
return self.new_series(
inputs,
shape=tuple(shape),
dtype=df_or_series.dtype,
index_value=index_value,
name=df_or_series.name,
)
else:
return self.new_dataframe(
inputs,
shape=tuple(shape),
dtypes=dtypes,
index_value=index_value,
columns_value=columns_value,
)
|
def __call__(self, df_or_series):
inputs = [df_or_series]
shape = list(df_or_series.shape)
index_value = df_or_series.index_value
columns_value = dtypes = None
if df_or_series.ndim == 2:
columns_value = df_or_series.columns_value
dtypes = df_or_series.dtypes
if self._index is not None:
shape[0] = self._index.shape[0]
index_value = asindex(self._index).index_value
if isinstance(self._index, (Base, Entity)):
inputs.append(self._index)
if self._columns is not None:
shape[1] = self._columns.shape[0]
dtypes = df_or_series.dtypes.reindex(index=self._columns).fillna(
np.dtype(np.float64)
)
columns_value = parse_index(dtypes.index, store_data=True)
if self._fill_value is not None and isinstance(self._fill_value, (Base, Entity)):
inputs.append(self._fill_value)
if df_or_series.ndim == 1:
return self.new_series(
inputs,
shape=shape,
dtype=df_or_series.dtype,
index_value=index_value,
name=df_or_series.name,
)
else:
return self.new_dataframe(
inputs,
shape=shape,
dtypes=dtypes,
index_value=index_value,
columns_value=columns_value,
)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def _sparse_reindex(cls, inp, index=None, columns=None):
if inp.ndim == 2:
columns = inp.columns if columns is None else columns
index_shape = len(index) if index is not None else len(inp)
i_to_columns = dict()
for i, col in enumerate(columns):
if col in inp.dtypes:
if index is None:
i_to_columns[i] = inp[col]
else:
indexer = inp.index.reindex(index)[1]
cond = indexer >= 0
available_indexer = indexer[cond]
del indexer
data = inp[col].iloc[available_indexer].to_numpy()
ind = cond.nonzero()[0]
spmatrix = sps.csc_matrix(
(data, (ind, np.zeros_like(ind))),
shape=(index_shape, 1),
dtype=inp[col].dtype,
)
sparse_array = pd.arrays.SparseArray.from_spmatrix(spmatrix)
# convert to SparseDtype(xxx, np.nan)
# to ensure 0 in sparse_array not converted to np.nan
sparse_array = pd.arrays.SparseArray(
sparse_array.sp_values,
sparse_index=sparse_array.sp_index,
fill_value=np.nan,
dtype=pd.SparseDtype(sparse_array.dtype, np.nan),
)
series = pd.Series(sparse_array, index=index)
i_to_columns[i] = series
else:
ind = index if index is not None else inp.index
i_to_columns[i] = pd.DataFrame.sparse.from_spmatrix(
sps.coo_matrix((index_shape, 1), dtype=np.float64), index=ind
).iloc[:, 0]
df = pd.DataFrame(i_to_columns)
df.columns = columns
return df
else:
indexer = inp.index.reindex(index)[1]
cond = indexer >= 0
available_indexer = indexer[cond]
del indexer
data = inp.iloc[available_indexer].to_numpy()
ind = cond.nonzero()[0]
spmatrix = sps.csc_matrix(
(data, (ind, np.zeros_like(ind))), shape=(len(index), 1), dtype=inp.dtype
)
sparse_array = pd.arrays.SparseArray.from_spmatrix(spmatrix)
# convert to SparseDtype(xxx, np.nan)
# to ensure 0 in sparse_array not converted to np.nan
sparse_array = pd.arrays.SparseArray(
sparse_array.sp_values,
sparse_index=sparse_array.sp_index,
fill_value=np.nan,
dtype=pd.SparseDtype(sparse_array.dtype, np.nan),
)
series = pd.Series(sparse_array, index=index, name=inp.name)
return series
|
def _sparse_reindex(cls, inp, index=None, columns=None):
if inp.ndim == 2:
columns = inp.columns if columns is None else columns
index_shape = len(index) if index is not None else len(inp)
i_to_columns = dict()
for i, col in enumerate(columns):
if col in inp.dtypes:
if index is None:
i_to_columns[i] = inp[col]
else:
indexer = inp.index.reindex(index)[1]
cond = indexer >= 0
available_indexer = indexer[cond]
del indexer
data = inp[col].iloc[available_indexer].to_numpy()
ind = cond.nonzero()[0]
spmatrix = sps.csc_matrix(
(data, (ind, np.zeros_like(ind))),
shape=(index_shape, 1),
dtype=inp[col].dtype,
)
sparse_array = pd.arrays.SparseArray.from_spmatrix(spmatrix)
# convert to SparseDtype(xxx, np.nan)
# to ensure 0 in sparse_array not converted to np.nan
sparse_array = pd.arrays.SparseArray(
sparse_array.sp_values,
sparse_index=sparse_array.sp_index,
fill_value=np.nan,
dtype=pd.SparseDtype(sparse_array.dtype, np.nan),
)
series = pd.Series(sparse_array, index=index)
i_to_columns[i] = series
else:
ind = index if index is not None else inp.index
i_to_columns[i] = pd.DataFrame.sparse.from_spmatrix(
sps.coo_matrix((index_shape, 1), dtype=np.float64), index=ind
).iloc[:, 0]
df = pd.DataFrame(i_to_columns)
df.columns = columns
return df
else:
indexer = inp.index.reindex(index)[1]
spmatrix = sps.lil_matrix((len(index), 1), dtype=inp.dtype)
cond = indexer >= 0
available_indexer = indexer[cond]
del indexer
spmatrix[cond, 0] = inp.iloc[available_indexer].to_numpy()
series = pd.DataFrame.sparse.from_spmatrix(spmatrix, index=index).iloc[:, 0]
series.name = inp.name
return series
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def reindex(df_or_series, *args, **kwargs):
"""
Conform Series/DataFrame to new index with optional filling logic.
Places NA/NaN in locations having no value in the previous index. A new object
is produced unless the new index is equivalent to the current one and
``copy=False``.
Parameters
----------
labels : array-like, optional
New labels / index to conform the axis specified by 'axis' to.
index, columns : array-like, optional
New labels / index to conform to, should be specified using
keywords. Preferably an Index object to avoid duplicating data.
axis : int or str, optional
Axis to target. Can be either the axis name ('index', 'columns')
or number (0, 1).
method : {None, 'backfill'/'bfill', 'pad'/'ffill', 'nearest'}
Method to use for filling holes in reindexed DataFrame.
Please note: this is only applicable to DataFrames/Series with a
monotonically increasing/decreasing index.
* None (default): don't fill gaps
* pad / ffill: Propagate last valid observation forward to next
valid.
* backfill / bfill: Use next valid observation to fill gap.
* nearest: Use nearest valid observations to fill gap.
copy : bool, default True
Return a new object, even if the passed indexes are the same.
level : int or name
Broadcast across a level, matching Index values on the
passed MultiIndex level.
fill_value : scalar, default np.NaN
Value to use for missing values. Defaults to NaN, but can be any
"compatible" value.
limit : int, default None
Maximum number of consecutive elements to forward or backward fill.
tolerance : optional
Maximum distance between original and new labels for inexact
matches. The values of the index at the matching locations most
satisfy the equation ``abs(index[indexer] - target) <= tolerance``.
Tolerance may be a scalar value, which applies the same tolerance
to all values, or list-like, which applies variable tolerance per
element. List-like includes list, tuple, array, Series, and must be
the same size as the index and its dtype must exactly match the
index's type.
Returns
-------
Series/DataFrame with changed index.
See Also
--------
DataFrame.set_index : Set row labels.
DataFrame.reset_index : Remove row labels or move them to new columns.
DataFrame.reindex_like : Change to same indices as other DataFrame.
Examples
--------
``DataFrame.reindex`` supports two calling conventions
* ``(index=index_labels, columns=column_labels, ...)``
* ``(labels, axis={'index', 'columns'}, ...)``
We *highly* recommend using keyword arguments to clarify your
intent.
Create a dataframe with some fictional data.
>>> import mars.dataframe as md
>>> index = ['Firefox', 'Chrome', 'Safari', 'IE10', 'Konqueror']
>>> df = md.DataFrame({'http_status': [200, 200, 404, 404, 301],
... 'response_time': [0.04, 0.02, 0.07, 0.08, 1.0]},
... index=index)
>>> df.execute()
http_status response_time
Firefox 200 0.04
Chrome 200 0.02
Safari 404 0.07
IE10 404 0.08
Konqueror 301 1.00
Create a new index and reindex the dataframe. By default
values in the new index that do not have corresponding
records in the dataframe are assigned ``NaN``.
>>> new_index = ['Safari', 'Iceweasel', 'Comodo Dragon', 'IE10',
... 'Chrome']
>>> df.reindex(new_index).execute()
http_status response_time
Safari 404.0 0.07
Iceweasel NaN NaN
Comodo Dragon NaN NaN
IE10 404.0 0.08
Chrome 200.0 0.02
We can fill in the missing values by passing a value to
the keyword ``fill_value``. Because the index is not monotonically
increasing or decreasing, we cannot use arguments to the keyword
``method`` to fill the ``NaN`` values.
>>> df.reindex(new_index, fill_value=0).execute()
http_status response_time
Safari 404 0.07
Iceweasel 0 0.00
Comodo Dragon 0 0.00
IE10 404 0.08
Chrome 200 0.02
>>> df.reindex(new_index, fill_value='missing').execute()
http_status response_time
Safari 404 0.07
Iceweasel missing missing
Comodo Dragon missing missing
IE10 404 0.08
Chrome 200 0.02
We can also reindex the columns.
>>> df.reindex(columns=['http_status', 'user_agent']).execute()
http_status user_agent
Firefox 200 NaN
Chrome 200 NaN
Safari 404 NaN
IE10 404 NaN
Konqueror 301 NaN
Or we can use "axis-style" keyword arguments
>>> df.reindex(['http_status', 'user_agent'], axis="columns").execute()
http_status user_agent
Firefox 200 NaN
Chrome 200 NaN
Safari 404 NaN
IE10 404 NaN
Konqueror 301 NaN
To further illustrate the filling functionality in
``reindex``, we will create a dataframe with a
monotonically increasing index (for example, a sequence
of dates).
>>> date_index = md.date_range('1/1/2010', periods=6, freq='D')
>>> df2 = md.DataFrame({"prices": [100, 101, np.nan, 100, 89, 88]},
... index=date_index)
>>> df2.execute()
prices
2010-01-01 100.0
2010-01-02 101.0
2010-01-03 NaN
2010-01-04 100.0
2010-01-05 89.0
2010-01-06 88.0
Suppose we decide to expand the dataframe to cover a wider
date range.
>>> date_index2 = md.date_range('12/29/2009', periods=10, freq='D')
>>> df2.reindex(date_index2).execute()
prices
2009-12-29 NaN
2009-12-30 NaN
2009-12-31 NaN
2010-01-01 100.0
2010-01-02 101.0
2010-01-03 NaN
2010-01-04 100.0
2010-01-05 89.0
2010-01-06 88.0
2010-01-07 NaN
The index entries that did not have a value in the original data frame
(for example, '2009-12-29') are by default filled with ``NaN``.
If desired, we can fill in the missing values using one of several
options.
For example, to back-propagate the last valid value to fill the ``NaN``
values, pass ``bfill`` as an argument to the ``method`` keyword.
>>> df2.reindex(date_index2, method='bfill').execute()
prices
2009-12-29 100.0
2009-12-30 100.0
2009-12-31 100.0
2010-01-01 100.0
2010-01-02 101.0
2010-01-03 NaN
2010-01-04 100.0
2010-01-05 89.0
2010-01-06 88.0
2010-01-07 NaN
Please note that the ``NaN`` value present in the original dataframe
(at index value 2010-01-03) will not be filled by any of the
value propagation schemes. This is because filling while reindexing
does not look at dataframe values, but only compares the original and
desired indexes. If you do want to fill in the ``NaN`` values present
in the original dataframe, use the ``fillna()`` method.
See the :ref:`user guide <basics.reindexing>` for more.
"""
axes = validate_axis_style_args(df_or_series, args, kwargs, "labels", "reindex")
# Pop these, since the values are in `kwargs` under different names
kwargs.pop("index", None)
if df_or_series.ndim > 1:
kwargs.pop("columns", None)
kwargs.pop("axis", None)
kwargs.pop("labels", None)
method = kwargs.pop("method", None)
level = kwargs.pop("level", None)
copy = kwargs.pop("copy", True)
limit = kwargs.pop("limit", None)
tolerance = kwargs.pop("tolerance", None)
fill_value = kwargs.pop("fill_value", None)
enable_sparse = kwargs.pop("enable_sparse", None)
if kwargs:
raise TypeError(
f'reindex() got an unexpected keyword argument "{list(kwargs.keys())[0]}"'
)
if tolerance is not None: # pragma: no cover
raise NotImplementedError("`tolerance` is not supported yet")
if method == "nearest": # pragma: no cover
raise NotImplementedError("method=nearest is not supported yet")
index = axes.get("index")
index_freq = None
if isinstance(index, (Base, Entity)):
if isinstance(index, DataFrameIndexType):
index_freq = getattr(index.index_value.value, "freq", None)
if not isinstance(index, INDEX_TYPE):
index = astensor(index)
elif index is not None:
index = np.asarray(index)
index_freq = getattr(index, "freq", None)
columns = axes.get("columns")
if isinstance(columns, (Base, Entity)): # pragma: no cover
try:
columns = columns.fetch()
except ValueError:
raise NotImplementedError(
"`columns` need to be executed first if it's a Mars object"
)
elif columns is not None:
columns = np.asarray(columns)
if isinstance(fill_value, (Base, Entity)) and getattr(fill_value, "ndim", 0) != 0:
raise ValueError("fill_value must be a scalar")
op = DataFrameReindex(
index=index,
index_freq=index_freq,
columns=columns,
method=method,
level=level,
fill_value=fill_value,
limit=limit,
enable_sparse=enable_sparse,
)
ret = op(df_or_series)
if copy:
return ret.copy()
return ret
|
def reindex(df_or_series, *args, **kwargs):
"""
Conform Series/DataFrame to new index with optional filling logic.
Places NA/NaN in locations having no value in the previous index. A new object
is produced unless the new index is equivalent to the current one and
``copy=False``.
Parameters
----------
labels : array-like, optional
New labels / index to conform the axis specified by 'axis' to.
index, columns : array-like, optional
New labels / index to conform to, should be specified using
keywords. Preferably an Index object to avoid duplicating data.
axis : int or str, optional
Axis to target. Can be either the axis name ('index', 'columns')
or number (0, 1).
method : {None, 'backfill'/'bfill', 'pad'/'ffill', 'nearest'}
Method to use for filling holes in reindexed DataFrame.
Please note: this is only applicable to DataFrames/Series with a
monotonically increasing/decreasing index.
* None (default): don't fill gaps
* pad / ffill: Propagate last valid observation forward to next
valid.
* backfill / bfill: Use next valid observation to fill gap.
* nearest: Use nearest valid observations to fill gap.
copy : bool, default True
Return a new object, even if the passed indexes are the same.
level : int or name
Broadcast across a level, matching Index values on the
passed MultiIndex level.
fill_value : scalar, default np.NaN
Value to use for missing values. Defaults to NaN, but can be any
"compatible" value.
limit : int, default None
Maximum number of consecutive elements to forward or backward fill.
tolerance : optional
Maximum distance between original and new labels for inexact
matches. The values of the index at the matching locations most
satisfy the equation ``abs(index[indexer] - target) <= tolerance``.
Tolerance may be a scalar value, which applies the same tolerance
to all values, or list-like, which applies variable tolerance per
element. List-like includes list, tuple, array, Series, and must be
the same size as the index and its dtype must exactly match the
index's type.
Returns
-------
Series/DataFrame with changed index.
See Also
--------
DataFrame.set_index : Set row labels.
DataFrame.reset_index : Remove row labels or move them to new columns.
DataFrame.reindex_like : Change to same indices as other DataFrame.
Examples
--------
``DataFrame.reindex`` supports two calling conventions
* ``(index=index_labels, columns=column_labels, ...)``
* ``(labels, axis={'index', 'columns'}, ...)``
We *highly* recommend using keyword arguments to clarify your
intent.
Create a dataframe with some fictional data.
>>> import mars.dataframe as md
>>> index = ['Firefox', 'Chrome', 'Safari', 'IE10', 'Konqueror']
>>> df = md.DataFrame({'http_status': [200, 200, 404, 404, 301],
... 'response_time': [0.04, 0.02, 0.07, 0.08, 1.0]},
... index=index)
>>> df.execute()
http_status response_time
Firefox 200 0.04
Chrome 200 0.02
Safari 404 0.07
IE10 404 0.08
Konqueror 301 1.00
Create a new index and reindex the dataframe. By default
values in the new index that do not have corresponding
records in the dataframe are assigned ``NaN``.
>>> new_index = ['Safari', 'Iceweasel', 'Comodo Dragon', 'IE10',
... 'Chrome']
>>> df.reindex(new_index).execute()
http_status response_time
Safari 404.0 0.07
Iceweasel NaN NaN
Comodo Dragon NaN NaN
IE10 404.0 0.08
Chrome 200.0 0.02
We can fill in the missing values by passing a value to
the keyword ``fill_value``. Because the index is not monotonically
increasing or decreasing, we cannot use arguments to the keyword
``method`` to fill the ``NaN`` values.
>>> df.reindex(new_index, fill_value=0).execute()
http_status response_time
Safari 404 0.07
Iceweasel 0 0.00
Comodo Dragon 0 0.00
IE10 404 0.08
Chrome 200 0.02
>>> df.reindex(new_index, fill_value='missing').execute()
http_status response_time
Safari 404 0.07
Iceweasel missing missing
Comodo Dragon missing missing
IE10 404 0.08
Chrome 200 0.02
We can also reindex the columns.
>>> df.reindex(columns=['http_status', 'user_agent']).execute()
http_status user_agent
Firefox 200 NaN
Chrome 200 NaN
Safari 404 NaN
IE10 404 NaN
Konqueror 301 NaN
Or we can use "axis-style" keyword arguments
>>> df.reindex(['http_status', 'user_agent'], axis="columns").execute()
http_status user_agent
Firefox 200 NaN
Chrome 200 NaN
Safari 404 NaN
IE10 404 NaN
Konqueror 301 NaN
To further illustrate the filling functionality in
``reindex``, we will create a dataframe with a
monotonically increasing index (for example, a sequence
of dates).
>>> date_index = md.date_range('1/1/2010', periods=6, freq='D')
>>> df2 = md.DataFrame({"prices": [100, 101, np.nan, 100, 89, 88]},
... index=date_index)
>>> df2.execute()
prices
2010-01-01 100.0
2010-01-02 101.0
2010-01-03 NaN
2010-01-04 100.0
2010-01-05 89.0
2010-01-06 88.0
Suppose we decide to expand the dataframe to cover a wider
date range.
>>> date_index2 = md.date_range('12/29/2009', periods=10, freq='D')
>>> df2.reindex(date_index2).execute()
prices
2009-12-29 NaN
2009-12-30 NaN
2009-12-31 NaN
2010-01-01 100.0
2010-01-02 101.0
2010-01-03 NaN
2010-01-04 100.0
2010-01-05 89.0
2010-01-06 88.0
2010-01-07 NaN
The index entries that did not have a value in the original data frame
(for example, '2009-12-29') are by default filled with ``NaN``.
If desired, we can fill in the missing values using one of several
options.
For example, to back-propagate the last valid value to fill the ``NaN``
values, pass ``bfill`` as an argument to the ``method`` keyword.
>>> df2.reindex(date_index2, method='bfill').execute()
prices
2009-12-29 100.0
2009-12-30 100.0
2009-12-31 100.0
2010-01-01 100.0
2010-01-02 101.0
2010-01-03 NaN
2010-01-04 100.0
2010-01-05 89.0
2010-01-06 88.0
2010-01-07 NaN
Please note that the ``NaN`` value present in the original dataframe
(at index value 2010-01-03) will not be filled by any of the
value propagation schemes. This is because filling while reindexing
does not look at dataframe values, but only compares the original and
desired indexes. If you do want to fill in the ``NaN`` values present
in the original dataframe, use the ``fillna()`` method.
See the :ref:`user guide <basics.reindexing>` for more.
"""
axes = validate_axis_style_args(df_or_series, args, kwargs, "labels", "reindex")
# Pop these, since the values are in `kwargs` under different names
kwargs.pop("index", None)
if df_or_series.ndim > 1:
kwargs.pop("columns", None)
kwargs.pop("axis", None)
kwargs.pop("labels", None)
method = kwargs.pop("method", None)
level = kwargs.pop("level", None)
copy = kwargs.pop("copy", True)
limit = kwargs.pop("limit", None)
tolerance = kwargs.pop("tolerance", None)
fill_value = kwargs.pop("fill_value", None)
enable_sparse = kwargs.pop("enable_sparse", None)
if kwargs:
raise TypeError(
f'reindex() got an unexpected keyword argument "{list(kwargs.keys())[0]}"'
)
if tolerance is not None: # pragma: no cover
raise NotImplementedError("`tolerance` is not supported yet")
if method == "nearest": # pragma: no cover
raise NotImplementedError("method=nearest is not supported yet")
index = axes.get("index")
index_freq = None
if isinstance(index, (Base, Entity)):
if isinstance(index, DataFrameIndexType):
index_freq = getattr(index.index_value.value, "freq", None)
index = astensor(index)
elif index is not None:
index = np.asarray(index)
index_freq = getattr(index, "freq", None)
columns = axes.get("columns")
if isinstance(columns, (Base, Entity)): # pragma: no cover
try:
columns = columns.fetch()
except ValueError:
raise NotImplementedError(
"`columns` need to be executed first if it's a Mars object"
)
elif columns is not None:
columns = np.asarray(columns)
if isinstance(fill_value, (Base, Entity)) and getattr(fill_value, "ndim", 0) != 0:
raise ValueError("fill_value must be a scalar")
op = DataFrameReindex(
index=index,
index_freq=index_freq,
columns=columns,
method=method,
level=level,
fill_value=fill_value,
limit=limit,
enable_sparse=enable_sparse,
)
ret = op(df_or_series)
if copy:
return ret.copy()
return ret
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def __call__(self, target: DataFrame, value):
raw_target = target
inputs = [target]
if np.isscalar(value):
value_dtype = np.array(value).dtype
elif self._is_scalar_tensor(value):
inputs.append(value)
value_dtype = value.dtype
else:
if isinstance(value, (pd.Series, SERIES_TYPE)):
value = asseries(value)
value_dtype = value.dtype
elif is_list_like(value) or isinstance(value, TENSOR_TYPE):
value = asseries(value, index=target.index)
value_dtype = value.dtype
else: # pragma: no cover
raise TypeError(
"Wrong value type, could be one of scalar, Series or tensor"
)
if target.shape[0] == 0:
# target empty, reindex target first
target = target.reindex(value.index)
inputs[0] = target
elif value.index_value.key != target.index_value.key:
# need reindex when target df is not empty and index different
value = value.reindex(target.index)
inputs.append(value)
index_value = target.index_value
dtypes = target.dtypes.copy(deep=True)
dtypes.loc[self._indexes] = value_dtype
columns_value = parse_index(dtypes.index, store_data=True)
ret = self.new_dataframe(
inputs,
shape=(target.shape[0], len(dtypes)),
dtypes=dtypes,
index_value=index_value,
columns_value=columns_value,
)
raw_target.data = ret.data
|
def __call__(self, target: DataFrame, value):
inputs = [target]
if np.isscalar(value):
value_dtype = np.array(value).dtype
elif self._is_scalar_tensor(value):
inputs.append(value)
value_dtype = value.dtype
else:
if isinstance(value, (pd.Series, SERIES_TYPE)):
value = asseries(value)
inputs.append(value)
value_dtype = value.dtype
elif is_list_like(value) or isinstance(value, TENSOR_TYPE):
value = asseries(value, index=target.index)
inputs.append(value)
value_dtype = value.dtype
else: # pragma: no cover
raise TypeError(
"Wrong value type, could be one of scalar, Series or tensor"
)
if value.index_value.key != target.index_value.key: # pragma: no cover
raise NotImplementedError(
"Does not support setting value with different index for now"
)
index_value = target.index_value
dtypes = target.dtypes.copy(deep=True)
dtypes.loc[self._indexes] = value_dtype
columns_value = parse_index(dtypes.index, store_data=True)
ret = self.new_dataframe(
inputs,
shape=(target.shape[0], len(dtypes)),
dtypes=dtypes,
index_value=index_value,
columns_value=columns_value,
)
target.data = ret.data
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def __init__(
self,
data=None,
index=None,
columns=None,
dtype=None,
copy=False,
chunk_size=None,
gpu=None,
sparse=None,
num_partitions=None,
):
# make sure __getattr__ does not result in stack overflow
self._data = None
need_repart = False
if isinstance(data, TENSOR_TYPE):
if chunk_size is not None:
data = data.rechunk(chunk_size)
df = dataframe_from_tensor(
data, index=index, columns=columns, gpu=gpu, sparse=sparse
)
need_repart = num_partitions is not None
elif isinstance(data, SERIES_TYPE):
df = data.to_frame()
need_repart = num_partitions is not None
elif isinstance(data, DATAFRAME_TYPE):
if not hasattr(data, "data"):
# DataFrameData
df = _Frame(data)
else:
df = data
need_repart = num_partitions is not None
elif isinstance(data, dict) and any(
isinstance(v, (Base, Entity)) for v in data.values()
):
# data is a dict and some value is tensor
df = dataframe_from_1d_tileables(
data, index=index, columns=columns, gpu=gpu, sparse=sparse
)
need_repart = num_partitions is not None
elif isinstance(data, list) and any(isinstance(v, (Base, Entity)) for v in data):
# stack data together
data = stack(data)
df = dataframe_from_tensor(
data, index=index, columns=columns, gpu=gpu, sparse=sparse
)
need_repart = num_partitions is not None
elif isinstance(index, (INDEX_TYPE, SERIES_TYPE)):
if isinstance(data, dict):
data = {k: astensor(v, chunk_size=chunk_size) for k, v in data.items()}
df = dataframe_from_1d_tileables(
data, index=index, columns=columns, gpu=gpu, sparse=sparse
)
else:
if data is not None:
data = astensor(data, chunk_size=chunk_size)
df = dataframe_from_tensor(
data, index=index, columns=columns, gpu=gpu, sparse=sparse
)
need_repart = num_partitions is not None
else:
pdf = pd.DataFrame(data, index=index, columns=columns, dtype=dtype, copy=copy)
if num_partitions is not None:
chunk_size = ceildiv(len(pdf), num_partitions)
df = from_pandas_df(pdf, chunk_size=chunk_size, gpu=gpu, sparse=sparse)
if need_repart:
df = df.rebalance(num_partitions=num_partitions)
super().__init__(df.data)
|
def __init__(
self,
data=None,
index=None,
columns=None,
dtype=None,
copy=False,
chunk_size=None,
gpu=None,
sparse=None,
num_partitions=None,
):
# make sure __getattr__ does not result in stack overflow
self._data = None
need_repart = False
if isinstance(data, TENSOR_TYPE):
if chunk_size is not None:
data = data.rechunk(chunk_size)
df = dataframe_from_tensor(
data, index=index, columns=columns, gpu=gpu, sparse=sparse
)
need_repart = num_partitions is not None
elif isinstance(data, SERIES_TYPE):
df = data.to_frame()
need_repart = num_partitions is not None
elif isinstance(data, DATAFRAME_TYPE):
if not hasattr(data, "data"):
# DataFrameData
df = _Frame(data)
else:
df = data
need_repart = num_partitions is not None
elif isinstance(data, dict) and any(
isinstance(v, (Base, Entity)) for v in data.values()
):
# data is a dict and some value is tensor
df = dataframe_from_1d_tileables(
data, index=index, columns=columns, gpu=gpu, sparse=sparse
)
need_repart = num_partitions is not None
elif isinstance(index, (INDEX_TYPE, SERIES_TYPE)):
if isinstance(data, dict):
data = {k: astensor(v, chunk_size=chunk_size) for k, v in data.items()}
df = dataframe_from_1d_tileables(
data, index=index, columns=columns, gpu=gpu, sparse=sparse
)
else:
df = dataframe_from_tensor(
astensor(data, chunk_size=chunk_size),
index=index,
columns=columns,
gpu=gpu,
sparse=sparse,
)
need_repart = num_partitions is not None
else:
pdf = pd.DataFrame(data, index=index, columns=columns, dtype=dtype, copy=copy)
if num_partitions is not None:
chunk_size = ceildiv(len(pdf), num_partitions)
df = from_pandas_df(pdf, chunk_size=chunk_size, gpu=gpu, sparse=sparse)
if need_repart:
df = df.rebalance(num_partitions=num_partitions)
super().__init__(df.data)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def __init__(
self,
data=None,
index=None,
dtype=None,
name=None,
copy=False,
chunk_size=None,
gpu=None,
sparse=None,
num_partitions=None,
):
# make sure __getattr__ does not result in stack overflow
self._data = None
if dtype is not None:
dtype = np.dtype(dtype)
need_repart = False
if isinstance(data, (TENSOR_TYPE, INDEX_TYPE)):
if chunk_size is not None:
data = data.rechunk(chunk_size)
name = name or getattr(data, "name", None)
series = series_from_tensor(
data, index=index, name=name, gpu=gpu, sparse=sparse
)
need_repart = num_partitions is not None
elif isinstance(index, INDEX_TYPE):
if data is not None:
data = astensor(data, chunk_size=chunk_size)
series = series_from_tensor(
data, index=index, name=name, dtype=dtype, gpu=gpu, sparse=sparse
)
need_repart = num_partitions is not None
elif isinstance(data, SERIES_TYPE):
if not hasattr(data, "data"):
# SeriesData
series = _Series(data)
else:
series = data
need_repart = num_partitions is not None
else:
pd_series = pd.Series(data, index=index, dtype=dtype, name=name, copy=copy)
if num_partitions is not None:
chunk_size = ceildiv(len(pd_series), num_partitions)
series = from_pandas_series(
pd_series, chunk_size=chunk_size, gpu=gpu, sparse=sparse
)
if need_repart:
series = series.rebalance(num_partitions=num_partitions)
super().__init__(series.data)
|
def __init__(
self,
data=None,
index=None,
dtype=None,
name=None,
copy=False,
chunk_size=None,
gpu=None,
sparse=None,
num_partitions=None,
):
# make sure __getattr__ does not result in stack overflow
self._data = None
need_repart = False
if isinstance(data, (TENSOR_TYPE, INDEX_TYPE)):
if chunk_size is not None:
data = data.rechunk(chunk_size)
name = name or getattr(data, "name", None)
series = series_from_tensor(
data, index=index, name=name, gpu=gpu, sparse=sparse
)
need_repart = num_partitions is not None
elif isinstance(index, INDEX_TYPE):
series = series_from_tensor(
astensor(data, chunk_size=chunk_size),
index=index,
name=name,
gpu=gpu,
sparse=sparse,
)
need_repart = num_partitions is not None
elif isinstance(data, SERIES_TYPE):
if not hasattr(data, "data"):
# SeriesData
series = _Series(data)
else:
series = data
need_repart = num_partitions is not None
else:
pd_series = pd.Series(data, index=index, dtype=dtype, name=name, copy=copy)
if num_partitions is not None:
chunk_size = ceildiv(len(pd_series), num_partitions)
series = from_pandas_series(
pd_series, chunk_size=chunk_size, gpu=gpu, sparse=sparse
)
if need_repart:
series = series.rebalance(num_partitions=num_partitions)
super().__init__(series.data)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.