after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def parse_code_line(self, status, code):
firstchar = status[0]
if firstchar == "-" or (self.excluding and firstchar in "#=0123456789"):
# remember certain non-executed lines
if self.excluding or is_non_code(code):
self.noncode.add(self.lineno)
return True
if firstchar == "#":
if is_non_code(code):
self.noncode.add(self.lineno)
else:
self.uncovered.add(self.lineno)
return True
if firstchar == "=":
self.uncovered_exceptional.add(self.lineno)
return True
if firstchar in "0123456789":
# GCOV 8 marks partial coverage
# with a trailing "*" after the execution count.
self.covered[self.lineno] = int(status.rstrip("*"))
return True
return False
|
def parse_code_line(self, status, code):
firstchar = status[0]
if firstchar == "-" or (self.excluding and firstchar in "#=0123456789"):
# remember certain non-executed lines
if self.excluding or is_non_code(code):
self.noncode.add(self.lineno)
return True
if firstchar == "#":
if is_non_code(code):
self.noncode.add(self.lineno)
else:
self.uncovered.add(self.lineno)
return True
if firstchar == "=":
self.uncovered_exceptional.add(self.lineno)
return True
if firstchar in "0123456789":
self.covered[self.lineno] = int(status)
return True
return False
|
https://github.com/gcovr/gcovr/issues/226
|
Traceback (most recent call last):
File "/usr/local/bin/gcovr", line 2412, in <module>
process_datafile(file_, covdata, options)
File "/usr/local/bin/gcovr", line 884, in process_datafile
process_gcov_data(fname, covdata, abs_filename, options)
File "/usr/local/bin/gcovr", line 616, in process_gcov_data
covered[lineno] = int(segments[0].strip())
ValueError: invalid literal for int() with base 10: '1*'
|
ValueError
|
def parse_tag_line(self, line, exclude_unreachable_branches):
# Start or end a template/macro specialization section
if line.startswith("-----"):
self.last_was_specialization_section_marker = True
return True
last_was_marker = self.last_was_specialization_section_marker
self.last_was_specialization_section_marker = False
# A specialization section marker is either followed by a section or
# ends it. If it starts a section, the next line contains a function
# name, followed by a colon. A function name cannot be parsed reliably,
# so we assume it is a function, and try to disprove this assumption by
# comparing with other kinds of lines.
if last_was_marker:
# 1. a function must end with a colon
is_function = line.endswith(":")
# 2. a function cannot start with space
if is_function:
is_function = not line.startswith(" ")
# 3. a function cannot start with a tag
if is_function:
tags = "function call branch".split()
is_function = not any(line.startswith(tag + " ") for tag in tags)
# If this line turned out to be a function, discard it.
return True
if line.startswith("function "):
return True
if line.startswith("call "):
return True
if line.startswith("branch "):
exclude_branch = False
if exclude_unreachable_branches and self.lineno == self.last_code_lineno:
if self.last_code_line_excluded:
exclude_branch = True
exclude_reason = "marked with exclude pattern"
else:
code = self.last_code_line
code = re.sub(cpp_style_comment_pattern, "", code)
code = re.sub(c_style_comment_pattern, "", code)
code = code.strip()
code_nospace = code.replace(" ", "")
exclude_branch = code in ["", "{", "}"] or code_nospace == "{}"
exclude_reason = "detected as compiler-generated code"
if exclude_branch:
self.logger.verbose_msg(
"Excluding unreachable branch on line {line} in file {fname}: {reason}",
line=self.lineno,
fname=self.fname,
reason=exclude_reason,
)
return True
fields = line.split() # e.g. "branch 0 taken 0% (fallthrough)"
branch_index = int(fields[1])
try:
count = int(fields[3])
except (ValueError, IndexError):
count = 0
self.branches.setdefault(self.lineno, {})[branch_index] = count
return True
return False
|
def parse_tag_line(self, line, exclude_unreachable_branches):
if line.startswith("function "):
return True
if line.startswith("call "):
return True
if line.startswith("branch "):
exclude_branch = False
if exclude_unreachable_branches and self.lineno == self.last_code_lineno:
if self.last_code_line_excluded:
exclude_branch = True
exclude_reason = "marked with exclude pattern"
else:
code = self.last_code_line
code = re.sub(cpp_style_comment_pattern, "", code)
code = re.sub(c_style_comment_pattern, "", code)
code = code.strip()
code_nospace = code.replace(" ", "")
exclude_branch = code in ["", "{", "}"] or code_nospace == "{}"
exclude_reason = "detected as compiler-generated code"
if exclude_branch:
self.logger.verbose_msg(
"Excluding unreachable branch on line {line} in file {fname}: {reason}",
line=self.lineno,
fname=self.fname,
reason=exclude_reason,
)
return True
fields = line.split() # e.g. "branch 0 taken 0% (fallthrough)"
branch_index = int(fields[1])
try:
count = int(fields[3])
except (ValueError, IndexError):
count = 0
self.branches.setdefault(self.lineno, {})[branch_index] = count
return True
return False
|
https://github.com/gcovr/gcovr/issues/226
|
Traceback (most recent call last):
File "/usr/local/bin/gcovr", line 2412, in <module>
process_datafile(file_, covdata, options)
File "/usr/local/bin/gcovr", line 884, in process_datafile
process_gcov_data(fname, covdata, abs_filename, options)
File "/usr/local/bin/gcovr", line 616, in process_gcov_data
covered[lineno] = int(segments[0].strip())
ValueError: invalid literal for int() with base 10: '1*'
|
ValueError
|
def osm_bldg_download(
polygon=None,
north=None,
south=None,
east=None,
west=None,
timeout=180,
memory=None,
max_query_area_size=50 * 1000 * 50 * 1000,
):
"""
Download OpenStreetMap building footprint data.
Parameters
----------
polygon : shapely Polygon or MultiPolygon
geographic shape to fetch the building footprints within
north : float
northern latitude of bounding box
south : float
southern latitude of bounding box
east : float
eastern longitude of bounding box
west : float
western longitude of bounding box
timeout : int
the timeout interval for requests and to pass to API
memory : int
server memory allocation size for the query, in bytes. If none, server
will use its default allocation size
max_query_area_size : float
max area for any part of the geometry, in the units the geometry is in:
any polygon bigger will get divided up for multiple queries to API
(default is 50,000 * 50,000 units (ie, 50km x 50km in area, if units are
meters))
Returns
-------
list
list of response_json dicts
"""
# check if we're querying by polygon or by bounding box based on which
# argument(s) where passed into this function
by_poly = polygon is not None
by_bbox = not (north is None or south is None or east is None or west is None)
if not (by_poly or by_bbox):
raise ValueError("You must pass a polygon or north, south, east, and west")
response_jsons = []
# pass server memory allocation in bytes for the query to the API
# if None, pass nothing so the server will use its default allocation size
# otherwise, define the query's maxsize parameter value as whatever the
# caller passed in
if memory is None:
maxsize = ""
else:
maxsize = "[maxsize:{}]".format(memory)
# define the query to send the API
if by_bbox:
# turn bbox into a polygon and project to local UTM
polygon = Polygon([(west, south), (east, south), (east, north), (west, north)])
geometry_proj, crs_proj = project_geometry(polygon)
# subdivide it if it exceeds the max area size (in meters), then project
# back to lat-long
geometry_proj_consolidated_subdivided = consolidate_subdivide_geometry(
geometry_proj, max_query_area_size=max_query_area_size
)
geometry, _ = project_geometry(
geometry_proj_consolidated_subdivided, crs=crs_proj, to_latlong=True
)
log(
"Requesting building footprints data within bounding box from API in {:,} request(s)".format(
len(geometry)
)
)
start_time = time.time()
# loop through each polygon rectangle in the geometry (there will only
# be one if original bbox didn't exceed max area size)
for poly in geometry:
# represent bbox as south,west,north,east and round lat-longs to 8
# decimal places (ie, within 1 mm) so URL strings aren't different
# due to float rounding issues (for consistent caching)
west, south, east, north = poly.bounds
query_template = (
'[out:json][timeout:{timeout}]{maxsize};((way["building"]({south:.8f},'
'{west:.8f},{north:.8f},{east:.8f});(._;>;););(relation["building"]'
"({south:.8f},{west:.8f},{north:.8f},{east:.8f});(._;>;);););out;"
)
query_str = query_template.format(
north=north,
south=south,
east=east,
west=west,
timeout=timeout,
maxsize=maxsize,
)
response_json = overpass_request(data={"data": query_str}, timeout=timeout)
response_jsons.append(response_json)
msg = (
"Got all building footprints data within bounding box from "
"API in {:,} request(s) and {:,.2f} seconds"
)
log(msg.format(len(geometry), time.time() - start_time))
elif by_poly:
# project to utm, divide polygon up into sub-polygons if area exceeds a
# max size (in meters), project back to lat-long, then get a list of polygon(s) exterior coordinates
geometry_proj, crs_proj = project_geometry(polygon)
geometry_proj_consolidated_subdivided = consolidate_subdivide_geometry(
geometry_proj, max_query_area_size=max_query_area_size
)
geometry, _ = project_geometry(
geometry_proj_consolidated_subdivided, crs=crs_proj, to_latlong=True
)
polygon_coord_strs = get_polygons_coordinates(geometry)
log(
"Requesting building footprints data within polygon from API in {:,} request(s)".format(
len(polygon_coord_strs)
)
)
start_time = time.time()
# pass each polygon exterior coordinates in the list to the API, one at
# a time
for polygon_coord_str in polygon_coord_strs:
query_template = (
"[out:json][timeout:{timeout}]{maxsize};(way"
'(poly:"{polygon}")["building"];(._;>;);relation'
'(poly:"{polygon}")["building"];(._;>;));out;'
)
query_str = query_template.format(
polygon=polygon_coord_str, timeout=timeout, maxsize=maxsize
)
response_json = overpass_request(data={"data": query_str}, timeout=timeout)
response_jsons.append(response_json)
msg = (
"Got all building footprints data within polygon from API in "
"{:,} request(s) and {:,.2f} seconds"
)
log(msg.format(len(polygon_coord_strs), time.time() - start_time))
return response_jsons
|
def osm_bldg_download(
polygon=None,
north=None,
south=None,
east=None,
west=None,
timeout=180,
memory=None,
max_query_area_size=50 * 1000 * 50 * 1000,
):
"""
Download OpenStreetMap building footprint data.
Parameters
----------
polygon : shapely Polygon or MultiPolygon
geographic shape to fetch the building footprints within
north : float
northern latitude of bounding box
south : float
southern latitude of bounding box
east : float
eastern longitude of bounding box
west : float
western longitude of bounding box
timeout : int
the timeout interval for requests and to pass to API
memory : int
server memory allocation size for the query, in bytes. If none, server
will use its default allocation size
max_query_area_size : float
max area for any part of the geometry, in the units the geometry is in:
any polygon bigger will get divided up for multiple queries to API
(default is 50,000 * 50,000 units (ie, 50km x 50km in area, if units are
meters))
Returns
-------
list
list of response_json dicts
"""
# check if we're querying by polygon or by bounding box based on which
# argument(s) where passed into this function
by_poly = polygon is not None
by_bbox = not (north is None or south is None or east is None or west is None)
if not (by_poly or by_bbox):
raise ValueError("You must pass a polygon or north, south, east, and west")
response_jsons = []
# pass server memory allocation in bytes for the query to the API
# if None, pass nothing so the server will use its default allocation size
# otherwise, define the query's maxsize parameter value as whatever the
# caller passed in
if memory is None:
maxsize = ""
else:
maxsize = "[maxsize:{}]".format(memory)
# define the query to send the API
if by_bbox:
# turn bbox into a polygon and project to local UTM
polygon = Polygon([(west, south), (east, south), (east, north), (west, north)])
geometry_proj, crs_proj = project_geometry(polygon)
# subdivide it if it exceeds the max area size (in meters), then project
# back to lat-long
geometry_proj_consolidated_subdivided = consolidate_subdivide_geometry(
geometry_proj, max_query_area_size=max_query_area_size
)
geometry, _ = project_geometry(
geometry_proj_consolidated_subdivided, crs=crs_proj, to_latlong=True
)
log(
"Requesting building footprints data within bounding box from API in {:,} request(s)".format(
len(geometry)
)
)
start_time = time.time()
# loop through each polygon rectangle in the geometry (there will only
# be one if original bbox didn't exceed max area size)
for poly in geometry:
# represent bbox as south,west,north,east and round lat-longs to 8
# decimal places (ie, within 1 mm) so URL strings aren't different
# due to float rounding issues (for consistent caching)
west, south, east, north = poly.bounds
query_template = (
'[out:json][timeout:{timeout}]{maxsize};((way["building"]({south:.8f},'
'{west:.8f},{north:.8f},{east:.8f});(._;>;););(relation["building"]'
"({south:.8f},{west:.8f},{north:.8f},{east:.8f});(._;>;);));out;"
)
query_str = query_template.format(
north=north,
south=south,
east=east,
west=west,
timeout=timeout,
maxsize=maxsize,
)
response_json = overpass_request(data={"data": query_str}, timeout=timeout)
response_jsons.append(response_json)
msg = (
"Got all building footprints data within bounding box from "
"API in {:,} request(s) and {:,.2f} seconds"
)
log(msg.format(len(geometry), time.time() - start_time))
elif by_poly:
# project to utm, divide polygon up into sub-polygons if area exceeds a
# max size (in meters), project back to lat-long, then get a list of polygon(s) exterior coordinates
geometry_proj, crs_proj = project_geometry(polygon)
geometry_proj_consolidated_subdivided = consolidate_subdivide_geometry(
geometry_proj, max_query_area_size=max_query_area_size
)
geometry, _ = project_geometry(
geometry_proj_consolidated_subdivided, crs=crs_proj, to_latlong=True
)
polygon_coord_strs = get_polygons_coordinates(geometry)
log(
"Requesting building footprints data within polygon from API in {:,} request(s)".format(
len(polygon_coord_strs)
)
)
start_time = time.time()
# pass each polygon exterior coordinates in the list to the API, one at
# a time
for polygon_coord_str in polygon_coord_strs:
query_template = (
"[out:json][timeout:{timeout}]{maxsize};(way"
'(poly:"{polygon}")["building"];(._;>;);relation'
'(poly:"{polygon}")["building"];(._;>;));out;'
)
query_str = query_template.format(
polygon=polygon_coord_str, timeout=timeout, maxsize=maxsize
)
response_json = overpass_request(data={"data": query_str}, timeout=timeout)
response_jsons.append(response_json)
msg = (
"Got all building footprints data within polygon from API in "
"{:,} request(s) and {:,.2f} seconds"
)
log(msg.format(len(polygon_coord_strs), time.time() - start_time))
return response_jsons
|
https://github.com/gboeing/osmnx/issues/158
|
ExceptionTraceback (most recent call last)
<ipython-input-4-40571a0b7fcd> in <module>()
----> 1 gdf = ox.buildings_from_place(place='Piedmont, California, USA')
2 gdf_proj = ox.project_gdf(gdf)
3 fig, ax = ox.plot_buildings(gdf_proj, bgcolor='#333333', color='w', save=True, show=False, close=True, filename='piedmont_bldgs', dpi=40)
4 Image('{}/{}.{}'.format(img_folder, 'piedmont_bldgs', extension), height=size, width=size)
C:\Users\B\Anaconda3\lib\site-packages\osmnx\buildings.pyc in buildings_from_place(place, retain_invalid)
283 city = gdf_from_place(place)
284 polygon = city['geometry'].iloc[0]
--> 285 return create_buildings_gdf(polygon, retain_invalid=retain_invalid)
286
287
C:\Users\B\Anaconda3\lib\site-packages\osmnx\buildings.pyc in create_buildings_gdf(polygon, north, south, east, west, retain_invalid)
159 """
160
--> 161 responses = osm_bldg_download(polygon, north, south, east, west)
162
163 vertices = {}
C:\Users\B\Anaconda3\lib\site-packages\osmnx\buildings.pyc in osm_bldg_download(polygon, north, south, east, west, timeout, memory, max_query_area_size)
125 '(poly:"{polygon}")["building"];(._;>;));out;')
126 query_str = query_template.format(polygon=polygon_coord_str, timeout=timeout, maxsize=maxsize)
--> 127 response_json = overpass_request(data={'data':query_str}, timeout=timeout)
128 response_jsons.append(response_json)
129 msg = ('Got all building footprints data within polygon from API in '
C:\Users\B\Anaconda3\lib\site-packages\osmnx\core.pyc in overpass_request(data, pause_duration, timeout, error_pause_duration)
358 else:
359 log('Server at {} returned status code {} and no JSON data'.format(domain, response.status_code), level=lg.ERROR)
--> 360 raise Exception('Server returned no JSON data.\n{} {}\n{}'.format(response, response.reason, response.text))
361
362 return response_json
Exception: Server returned no JSON data.
<Response [400]> Bad Request
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8" lang="en"/>
<title>OSM3S Response</title>
</head>
<body>
<p>The data included in this document is from www.openstreetmap.org. The data is made available under ODbL.</p>
<p><strong style="color:#FF0000">Error</strong>: line 1: parse error: ';' expected - ')' found. </p>
<p><strong style="color:#FF0000">Error</strong>: line 1: parse error: Unexpected end of input. </p>
<p><strong style="color:#FF0000">Error</strong>: line 1: static error: Element "print" cannot be subelement of element "union". </p>
</body>
</html>
|
Exception
|
def osm_bldg_download(
polygon=None,
north=None,
south=None,
east=None,
west=None,
timeout=180,
memory=None,
max_query_area_size=50 * 1000 * 50 * 1000,
):
"""
Download OpenStreetMap building footprint data.
Parameters
----------
polygon : shapely Polygon or MultiPolygon
geographic shape to fetch the building footprints within
north : float
northern latitude of bounding box
south : float
southern latitude of bounding box
east : float
eastern longitude of bounding box
west : float
western longitude of bounding box
timeout : int
the timeout interval for requests and to pass to API
memory : int
server memory allocation size for the query, in bytes. If none, server
will use its default allocation size
max_query_area_size : float
max area for any part of the geometry, in the units the geometry is in:
any polygon bigger will get divided up for multiple queries to API
(default is 50,000 * 50,000 units (ie, 50km x 50km in area, if units are
meters))
Returns
-------
list
list of response_json dicts
"""
# check if we're querying by polygon or by bounding box based on which
# argument(s) where passed into this function
by_poly = polygon is not None
by_bbox = not (north is None or south is None or east is None or west is None)
if not (by_poly or by_bbox):
raise ValueError("You must pass a polygon or north, south, east, and west")
response_jsons = []
# pass server memory allocation in bytes for the query to the API
# if None, pass nothing so the server will use its default allocation size
# otherwise, define the query's maxsize parameter value as whatever the
# caller passed in
if memory is None:
maxsize = ""
else:
maxsize = "[maxsize:{}]".format(memory)
# define the query to send the API
if by_bbox:
# turn bbox into a polygon and project to local UTM
polygon = Polygon([(west, south), (east, south), (east, north), (west, north)])
geometry_proj, crs_proj = project_geometry(polygon)
# subdivide it if it exceeds the max area size (in meters), then project
# back to lat-long
geometry_proj_consolidated_subdivided = consolidate_subdivide_geometry(
geometry_proj, max_query_area_size=max_query_area_size
)
geometry, _ = project_geometry(
geometry_proj_consolidated_subdivided, crs=crs_proj, to_latlong=True
)
log(
"Requesting building footprints data within bounding box from API in {:,} request(s)".format(
len(geometry)
)
)
start_time = time.time()
# loop through each polygon rectangle in the geometry (there will only
# be one if original bbox didn't exceed max area size)
for poly in geometry:
# represent bbox as south,west,north,east and round lat-longs to 8
# decimal places (ie, within 1 mm) so URL strings aren't different
# due to float rounding issues (for consistent caching)
west, south, east, north = poly.bounds
query_template = (
'[out:json][timeout:{timeout}]{maxsize};((way["building"]({south:.8f},'
'{west:.8f},{north:.8f},{east:.8f});(._;>;););(relation["building"]'
"({south:.8f},{west:.8f},{north:.8f},{east:.8f});(._;>;);););out;"
)
query_str = query_template.format(
north=north,
south=south,
east=east,
west=west,
timeout=timeout,
maxsize=maxsize,
)
response_json = overpass_request(data={"data": query_str}, timeout=timeout)
response_jsons.append(response_json)
msg = (
"Got all building footprints data within bounding box from "
"API in {:,} request(s) and {:,.2f} seconds"
)
log(msg.format(len(geometry), time.time() - start_time))
elif by_poly:
# project to utm, divide polygon up into sub-polygons if area exceeds a
# max size (in meters), project back to lat-long, then get a list of polygon(s) exterior coordinates
geometry_proj, crs_proj = project_geometry(polygon)
geometry_proj_consolidated_subdivided = consolidate_subdivide_geometry(
geometry_proj, max_query_area_size=max_query_area_size
)
geometry, _ = project_geometry(
geometry_proj_consolidated_subdivided, crs=crs_proj, to_latlong=True
)
polygon_coord_strs = get_polygons_coordinates(geometry)
log(
"Requesting building footprints data within polygon from API in {:,} request(s)".format(
len(polygon_coord_strs)
)
)
start_time = time.time()
# pass each polygon exterior coordinates in the list to the API, one at
# a time
for polygon_coord_str in polygon_coord_strs:
query_template = (
"[out:json][timeout:{timeout}]{maxsize};(way"
'(poly:"{polygon}")["building"];(._;>;);relation'
'(poly:"{polygon}")["building"];(._;>;););out;'
)
query_str = query_template.format(
polygon=polygon_coord_str, timeout=timeout, maxsize=maxsize
)
response_json = overpass_request(data={"data": query_str}, timeout=timeout)
response_jsons.append(response_json)
msg = (
"Got all building footprints data within polygon from API in "
"{:,} request(s) and {:,.2f} seconds"
)
log(msg.format(len(polygon_coord_strs), time.time() - start_time))
return response_jsons
|
def osm_bldg_download(
polygon=None,
north=None,
south=None,
east=None,
west=None,
timeout=180,
memory=None,
max_query_area_size=50 * 1000 * 50 * 1000,
):
"""
Download OpenStreetMap building footprint data.
Parameters
----------
polygon : shapely Polygon or MultiPolygon
geographic shape to fetch the building footprints within
north : float
northern latitude of bounding box
south : float
southern latitude of bounding box
east : float
eastern longitude of bounding box
west : float
western longitude of bounding box
timeout : int
the timeout interval for requests and to pass to API
memory : int
server memory allocation size for the query, in bytes. If none, server
will use its default allocation size
max_query_area_size : float
max area for any part of the geometry, in the units the geometry is in:
any polygon bigger will get divided up for multiple queries to API
(default is 50,000 * 50,000 units (ie, 50km x 50km in area, if units are
meters))
Returns
-------
list
list of response_json dicts
"""
# check if we're querying by polygon or by bounding box based on which
# argument(s) where passed into this function
by_poly = polygon is not None
by_bbox = not (north is None or south is None or east is None or west is None)
if not (by_poly or by_bbox):
raise ValueError("You must pass a polygon or north, south, east, and west")
response_jsons = []
# pass server memory allocation in bytes for the query to the API
# if None, pass nothing so the server will use its default allocation size
# otherwise, define the query's maxsize parameter value as whatever the
# caller passed in
if memory is None:
maxsize = ""
else:
maxsize = "[maxsize:{}]".format(memory)
# define the query to send the API
if by_bbox:
# turn bbox into a polygon and project to local UTM
polygon = Polygon([(west, south), (east, south), (east, north), (west, north)])
geometry_proj, crs_proj = project_geometry(polygon)
# subdivide it if it exceeds the max area size (in meters), then project
# back to lat-long
geometry_proj_consolidated_subdivided = consolidate_subdivide_geometry(
geometry_proj, max_query_area_size=max_query_area_size
)
geometry, _ = project_geometry(
geometry_proj_consolidated_subdivided, crs=crs_proj, to_latlong=True
)
log(
"Requesting building footprints data within bounding box from API in {:,} request(s)".format(
len(geometry)
)
)
start_time = time.time()
# loop through each polygon rectangle in the geometry (there will only
# be one if original bbox didn't exceed max area size)
for poly in geometry:
# represent bbox as south,west,north,east and round lat-longs to 8
# decimal places (ie, within 1 mm) so URL strings aren't different
# due to float rounding issues (for consistent caching)
west, south, east, north = poly.bounds
query_template = (
'[out:json][timeout:{timeout}]{maxsize};((way["building"]({south:.8f},'
'{west:.8f},{north:.8f},{east:.8f});(._;>;););(relation["building"]'
"({south:.8f},{west:.8f},{north:.8f},{east:.8f});(._;>;);););out;"
)
query_str = query_template.format(
north=north,
south=south,
east=east,
west=west,
timeout=timeout,
maxsize=maxsize,
)
response_json = overpass_request(data={"data": query_str}, timeout=timeout)
response_jsons.append(response_json)
msg = (
"Got all building footprints data within bounding box from "
"API in {:,} request(s) and {:,.2f} seconds"
)
log(msg.format(len(geometry), time.time() - start_time))
elif by_poly:
# project to utm, divide polygon up into sub-polygons if area exceeds a
# max size (in meters), project back to lat-long, then get a list of polygon(s) exterior coordinates
geometry_proj, crs_proj = project_geometry(polygon)
geometry_proj_consolidated_subdivided = consolidate_subdivide_geometry(
geometry_proj, max_query_area_size=max_query_area_size
)
geometry, _ = project_geometry(
geometry_proj_consolidated_subdivided, crs=crs_proj, to_latlong=True
)
polygon_coord_strs = get_polygons_coordinates(geometry)
log(
"Requesting building footprints data within polygon from API in {:,} request(s)".format(
len(polygon_coord_strs)
)
)
start_time = time.time()
# pass each polygon exterior coordinates in the list to the API, one at
# a time
for polygon_coord_str in polygon_coord_strs:
query_template = (
"[out:json][timeout:{timeout}]{maxsize};(way"
'(poly:"{polygon}")["building"];(._;>;);relation'
'(poly:"{polygon}")["building"];(._;>;));out;'
)
query_str = query_template.format(
polygon=polygon_coord_str, timeout=timeout, maxsize=maxsize
)
response_json = overpass_request(data={"data": query_str}, timeout=timeout)
response_jsons.append(response_json)
msg = (
"Got all building footprints data within polygon from API in "
"{:,} request(s) and {:,.2f} seconds"
)
log(msg.format(len(polygon_coord_strs), time.time() - start_time))
return response_jsons
|
https://github.com/gboeing/osmnx/issues/158
|
ExceptionTraceback (most recent call last)
<ipython-input-4-40571a0b7fcd> in <module>()
----> 1 gdf = ox.buildings_from_place(place='Piedmont, California, USA')
2 gdf_proj = ox.project_gdf(gdf)
3 fig, ax = ox.plot_buildings(gdf_proj, bgcolor='#333333', color='w', save=True, show=False, close=True, filename='piedmont_bldgs', dpi=40)
4 Image('{}/{}.{}'.format(img_folder, 'piedmont_bldgs', extension), height=size, width=size)
C:\Users\B\Anaconda3\lib\site-packages\osmnx\buildings.pyc in buildings_from_place(place, retain_invalid)
283 city = gdf_from_place(place)
284 polygon = city['geometry'].iloc[0]
--> 285 return create_buildings_gdf(polygon, retain_invalid=retain_invalid)
286
287
C:\Users\B\Anaconda3\lib\site-packages\osmnx\buildings.pyc in create_buildings_gdf(polygon, north, south, east, west, retain_invalid)
159 """
160
--> 161 responses = osm_bldg_download(polygon, north, south, east, west)
162
163 vertices = {}
C:\Users\B\Anaconda3\lib\site-packages\osmnx\buildings.pyc in osm_bldg_download(polygon, north, south, east, west, timeout, memory, max_query_area_size)
125 '(poly:"{polygon}")["building"];(._;>;));out;')
126 query_str = query_template.format(polygon=polygon_coord_str, timeout=timeout, maxsize=maxsize)
--> 127 response_json = overpass_request(data={'data':query_str}, timeout=timeout)
128 response_jsons.append(response_json)
129 msg = ('Got all building footprints data within polygon from API in '
C:\Users\B\Anaconda3\lib\site-packages\osmnx\core.pyc in overpass_request(data, pause_duration, timeout, error_pause_duration)
358 else:
359 log('Server at {} returned status code {} and no JSON data'.format(domain, response.status_code), level=lg.ERROR)
--> 360 raise Exception('Server returned no JSON data.\n{} {}\n{}'.format(response, response.reason, response.text))
361
362 return response_json
Exception: Server returned no JSON data.
<Response [400]> Bad Request
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8" lang="en"/>
<title>OSM3S Response</title>
</head>
<body>
<p>The data included in this document is from www.openstreetmap.org. The data is made available under ODbL.</p>
<p><strong style="color:#FF0000">Error</strong>: line 1: parse error: ';' expected - ')' found. </p>
<p><strong style="color:#FF0000">Error</strong>: line 1: parse error: Unexpected end of input. </p>
<p><strong style="color:#FF0000">Error</strong>: line 1: static error: Element "print" cannot be subelement of element "union". </p>
</body>
</html>
|
Exception
|
def truncate_graph_polygon(
G,
polygon,
retain_all=False,
truncate_by_edge=False,
quadrat_width=0.05,
min_num=3,
buffer_amount=1e-9,
):
"""
Remove every node in graph that falls outside some shapely Polygon or
MultiPolygon.
Parameters
----------
G : networkx multidigraph
polygon : Polygon or MultiPolygon
only retain nodes in graph that lie within this geometry
retain_all : bool
if True, return the entire graph even if it is not connected
truncate_by_edge : bool
if True retain node if it's outside polygon but at least one of node's
neighbors are within polygon (NOT CURRENTLY IMPLEMENTED)
quadrat_width : numeric
passed on to intersect_index_quadrats: the linear length (in degrees) of
the quadrats with which to cut up the geometry (default = 0.05, approx
4km at NYC's latitude)
min_num : int
passed on to intersect_index_quadrats: the minimum number of linear
quadrat lines (e.g., min_num=3 would produce a quadrat grid of 4
squares)
buffer_amount : numeric
passed on to intersect_index_quadrats: buffer the quadrat grid lines by
quadrat_width times buffer_amount
Returns
-------
networkx multidigraph
"""
start_time = time.time()
G = G.copy()
log("Identifying all nodes that lie outside the polygon...")
# get a GeoDataFrame of all the nodes
node_geom = [Point(data["x"], data["y"]) for _, data in G.nodes(data=True)]
gdf_nodes = gpd.GeoDataFrame({"node": list(G.nodes()), "geometry": node_geom})
gdf_nodes.crs = G.graph["crs"]
# find all the nodes in the graph that lie outside the polygon
points_within_geometry = intersect_index_quadrats(
gdf_nodes,
polygon,
quadrat_width=quadrat_width,
min_num=min_num,
buffer_amount=buffer_amount,
)
nodes_outside_polygon = gdf_nodes[
~gdf_nodes.index.isin(points_within_geometry.index)
]
# now remove from the graph all those nodes that lie outside the place
# polygon
start_time = time.time()
G.remove_nodes_from(nodes_outside_polygon["node"])
log(
"Removed {:,} nodes outside polygon in {:,.2f} seconds".format(
len(nodes_outside_polygon), time.time() - start_time
)
)
# remove any isolated nodes and retain only the largest component (if retain_all is False)
if not retain_all:
G = remove_isolated_nodes(G)
G = get_largest_component(G)
return G
|
def truncate_graph_polygon(
G,
polygon,
retain_all=False,
truncate_by_edge=False,
quadrat_width=0.05,
min_num=3,
buffer_amount=1e-9,
):
"""
Remove every node in graph that falls outside some shapely Polygon or
MultiPolygon.
Parameters
----------
G : networkx multidigraph
polygon : Polygon or MultiPolygon
only retain nodes in graph that lie within this geometry
retain_all : bool
if True, return the entire graph even if it is not connected
truncate_by_edge : bool
if True retain node if it's outside polygon but at least one of node's
neighbors are within polygon (NOT CURRENTLY IMPLEMENTED)
quadrat_width : numeric
passed on to intersect_index_quadrats: the linear length (in degrees) of
the quadrats with which to cut up the geometry (default = 0.05, approx
4km at NYC's latitude)
min_num : int
passed on to intersect_index_quadrats: the minimum number of linear
quadrat lines (e.g., min_num=3 would produce a quadrat grid of 4
squares)
buffer_amount : numeric
passed on to intersect_index_quadrats: buffer the quadrat grid lines by
quadrat_width times buffer_amount
Returns
-------
networkx multidigraph
"""
start_time = time.time()
G = G.copy()
log("Identifying all nodes that lie outside the polygon...")
# get a GeoDataFrame of all the nodes, for spatial analysis
node_geom = [Point(data["x"], data["y"]) for _, data in G.nodes(data=True)]
gdf_nodes = gpd.GeoDataFrame({"node": pd.Series(G.nodes()), "geometry": node_geom})
gdf_nodes.crs = G.graph["crs"]
# find all the nodes in the graph that lie outside the polygon
points_within_geometry = intersect_index_quadrats(
gdf_nodes,
polygon,
quadrat_width=quadrat_width,
min_num=min_num,
buffer_amount=buffer_amount,
)
nodes_outside_polygon = gdf_nodes[
~gdf_nodes.index.isin(points_within_geometry.index)
]
# now remove from the graph all those nodes that lie outside the place
# polygon
start_time = time.time()
G.remove_nodes_from(nodes_outside_polygon["node"])
log(
"Removed {:,} nodes outside polygon in {:,.2f} seconds".format(
len(nodes_outside_polygon), time.time() - start_time
)
)
# remove any isolated nodes and retain only the largest component (if retain_all is False)
if not retain_all:
G = remove_isolated_nodes(G)
G = get_largest_component(G)
return G
|
https://github.com/gboeing/osmnx/issues/9
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-2-0d3afc68e96a> in <module>()
----> 1 ox.plot_graph(ox.graph_from_place('Modena, Italy'))
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in graph_from_place(query, network_type, simplify, retain_all, truncate_by_edge, name, which_result, buffer_dist, timeout, memory, max_query_area_size, clean_periphery)
1799 if isinstance(query, str) or isinstance(query, dict):
1800 # if it is a string (place name) or dict (structured place query), then it is a single place
-> 1801 gdf_place = gdf_from_place(query, which_result=which_result, buffer_dist=buffer_dist)
1802 name = query
1803 elif isinstance(query, list):
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in gdf_from_place(query, gdf_name, which_result, buffer_dist)
522
523 # get the data from OSM
--> 524 data = osm_polygon_download(query, limit=which_result)
525 if len(data) >= which_result:
526
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in osm_polygon_download(query, limit, polygon_geojson, pause_duration)
495
496 # request the URL, return the JSON
--> 497 response_json = nominatim_request(params=params, timeout=30)
498 return response_json
499
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in nominatim_request(params, pause_duration, timeout, error_pause_duration)
373 start_time = time.time()
374 log('Requesting {} with timeout={}'.format(prepared_url, timeout))
--> 375 response = requests.get(url, params, timeout=timeout)
376
377 # get the response size and the domain, log result
TypeError: get() takes 1 positional argument but 2 were given
|
TypeError
|
def add_paths(G, paths, bidirectional=False):
"""
Add a collection of paths to the graph.
Parameters
----------
G : networkx multidigraph
paths : dict
the paths from OSM
bidirectional : bool
if True, create bidirectional edges for one-way streets
Returns
-------
None
"""
# the list of values OSM uses in its 'oneway' tag to denote True
# updated list of of values OSM uses based on https://www.geofabrik.de/de/data/geofabrik-osm-gis-standard-0.7.pdf
osm_oneway_values = ["yes", "true", "1", "-1", "T", "F"]
for data in paths.values():
if settings.all_oneway is True:
add_path(G, data, one_way=True)
# if this path is tagged as one-way and if it is not a walking network,
# then we'll add the path in one direction only
elif (
"oneway" in data and data["oneway"] in osm_oneway_values
) and not bidirectional:
if data["oneway"] == "-1" or data["oneway"] == "T":
# paths with a one-way value of -1 or T are one-way, but in the
# reverse direction of the nodes' order, see osm documentation
data["nodes"] = list(reversed(data["nodes"]))
# add this path (in only one direction) to the graph
add_path(G, data, one_way=True)
elif (
"junction" in data and data["junction"] == "roundabout"
) and not bidirectional:
# roundabout are also oneway but not tagged as is
add_path(G, data, one_way=True)
# else, this path is not tagged as one-way or it is a walking network
# (you can walk both directions on a one-way street)
else:
# add this path (in both directions) to the graph and set its
# 'oneway' attribute to False. if this is a walking network, this
# may very well be a one-way street (as cars/bikes go), but in a
# walking-only network it is a bi-directional edge
add_path(G, data, one_way=False)
return G
|
def add_paths(G, paths, bidirectional=False):
"""
Add a collection of paths to the graph.
Parameters
----------
G : networkx multidigraph
paths : dict
the paths from OSM
bidirectional : bool
if True, create bidirectional edges for one-way streets
Returns
-------
None
"""
# the list of values OSM uses in its 'oneway' tag to denote True
osm_oneway_values = ["yes", "true", "1", "-1"]
for data in paths.values():
if settings.all_oneway is True:
add_path(G, data, one_way=True)
# if this path is tagged as one-way and if it is not a walking network,
# then we'll add the path in one direction only
elif (
"oneway" in data and data["oneway"] in osm_oneway_values
) and not bidirectional:
if data["oneway"] == "-1":
# paths with a one-way value of -1 are one-way, but in the
# reverse direction of the nodes' order, see osm documentation
data["nodes"] = list(reversed(data["nodes"]))
# add this path (in only one direction) to the graph
add_path(G, data, one_way=True)
elif (
"junction" in data and data["junction"] == "roundabout"
) and not bidirectional:
# roundabout are also oneway but not tagged as is
add_path(G, data, one_way=True)
# else, this path is not tagged as one-way or it is a walking network
# (you can walk both directions on a one-way street)
else:
# add this path (in both directions) to the graph and set its
# 'oneway' attribute to False. if this is a walking network, this
# may very well be a one-way street (as cars/bikes go), but in a
# walking-only network it is a bi-directional edge
add_path(G, data, one_way=False)
return G
|
https://github.com/gboeing/osmnx/issues/9
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-2-0d3afc68e96a> in <module>()
----> 1 ox.plot_graph(ox.graph_from_place('Modena, Italy'))
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in graph_from_place(query, network_type, simplify, retain_all, truncate_by_edge, name, which_result, buffer_dist, timeout, memory, max_query_area_size, clean_periphery)
1799 if isinstance(query, str) or isinstance(query, dict):
1800 # if it is a string (place name) or dict (structured place query), then it is a single place
-> 1801 gdf_place = gdf_from_place(query, which_result=which_result, buffer_dist=buffer_dist)
1802 name = query
1803 elif isinstance(query, list):
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in gdf_from_place(query, gdf_name, which_result, buffer_dist)
522
523 # get the data from OSM
--> 524 data = osm_polygon_download(query, limit=which_result)
525 if len(data) >= which_result:
526
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in osm_polygon_download(query, limit, polygon_geojson, pause_duration)
495
496 # request the URL, return the JSON
--> 497 response_json = nominatim_request(params=params, timeout=30)
498 return response_json
499
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in nominatim_request(params, pause_duration, timeout, error_pause_duration)
373 start_time = time.time()
374 log('Requesting {} with timeout={}'.format(prepared_url, timeout))
--> 375 response = requests.get(url, params, timeout=timeout)
376
377 # get the response size and the domain, log result
TypeError: get() takes 1 positional argument but 2 were given
|
TypeError
|
def osm_polygon_download(query, limit=1, polygon_geojson=1):
"""
Geocode a place and download its boundary geometry from OSM's Nominatim API.
Parameters
----------
query : string or dict
query string or structured query dict to geocode/download
limit : int
max number of results to return
polygon_geojson : int
request the boundary geometry polygon from the API, 0=no, 1=yes
Returns
-------
dict
"""
# define the parameters
params = OrderedDict()
params["format"] = "json"
params["limit"] = limit
params["dedupe"] = (
0 # prevent OSM from deduping results so we get precisely 'limit' # of results
)
params["polygon_geojson"] = polygon_geojson
# add the structured query dict (if provided) to params, otherwise query
# with place name string
if isinstance(query, str):
params["q"] = query
elif isinstance(query, dict):
# add the query keys in alphabetical order so the URL is the same string
# each time, for caching purposes
for key in sorted(list(query.keys())):
params[key] = query[key]
else:
raise TypeError("query must be a dict or a string")
# request the URL, return the JSON
response_json = nominatim_request(params=params, timeout=30)
return response_json
|
def osm_polygon_download(query, limit=1, polygon_geojson=1):
"""
Geocode a place and download its boundary geometry from OSM's Nominatim API.
Parameters
----------
query : string or dict
query string or structured query dict to geocode/download
limit : int
max number of results to return
polygon_geojson : int
request the boundary geometry polygon from the API, 0=no, 1=yes
Returns
-------
dict
"""
# define the parameters
params = OrderedDict()
params["format"] = "json"
params["limit"] = limit
params["dedupe"] = (
0 # this prevents OSM from de-duping results so we're guaranteed to get precisely 'limit' number of results
)
params["polygon_geojson"] = polygon_geojson
# add the structured query dict (if provided) to params, otherwise query
# with place name string
if isinstance(query, str):
params["q"] = query
elif isinstance(query, dict):
# add the query keys in alphabetical order so the URL is the same string
# each time, for caching purposes
for key in sorted(list(query.keys())):
params[key] = query[key]
else:
raise TypeError("query must be a dict or a string")
# request the URL, return the JSON
response_json = nominatim_request(params=params, timeout=30)
return response_json
|
https://github.com/gboeing/osmnx/issues/9
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-2-0d3afc68e96a> in <module>()
----> 1 ox.plot_graph(ox.graph_from_place('Modena, Italy'))
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in graph_from_place(query, network_type, simplify, retain_all, truncate_by_edge, name, which_result, buffer_dist, timeout, memory, max_query_area_size, clean_periphery)
1799 if isinstance(query, str) or isinstance(query, dict):
1800 # if it is a string (place name) or dict (structured place query), then it is a single place
-> 1801 gdf_place = gdf_from_place(query, which_result=which_result, buffer_dist=buffer_dist)
1802 name = query
1803 elif isinstance(query, list):
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in gdf_from_place(query, gdf_name, which_result, buffer_dist)
522
523 # get the data from OSM
--> 524 data = osm_polygon_download(query, limit=which_result)
525 if len(data) >= which_result:
526
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in osm_polygon_download(query, limit, polygon_geojson, pause_duration)
495
496 # request the URL, return the JSON
--> 497 response_json = nominatim_request(params=params, timeout=30)
498 return response_json
499
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in nominatim_request(params, pause_duration, timeout, error_pause_duration)
373 start_time = time.time()
374 log('Requesting {} with timeout={}'.format(prepared_url, timeout))
--> 375 response = requests.get(url, params, timeout=timeout)
376
377 # get the response size and the domain, log result
TypeError: get() takes 1 positional argument but 2 were given
|
TypeError
|
def redistribute_vertices(geom, dist):
"""
Redistribute the vertices on a projected LineString or MultiLineString. The distance
argument is only approximate since the total distance of the linestring may not be
a multiple of the preferred distance. This function works on only [Multi]LineString
geometry types.
This code is adapted from an answer by Mike T from this original question:
https://stackoverflow.com/questions/34906124/interpolating-every-x-distance-along-multiline-in-shapely
Parameters
----------
geom : LineString or MultiLineString
a Shapely geometry
dist : float
spacing length along edges. Units are the same as the geom; Degrees for unprojected geometries and meters
for projected geometries. The smaller the value, the more points are created.
Returns
-------
list of Point geometries : list
"""
if geom.geom_type == "LineString":
num_vert = int(round(geom.length / dist))
if num_vert == 0:
num_vert = 1
return [
geom.interpolate(float(n) / num_vert, normalized=True)
for n in range(num_vert + 1)
]
elif geom.geom_type == "MultiLineString":
parts = [redistribute_vertices(part, dist) for part in geom]
return type(geom)([p for p in parts if not p])
else:
raise ValueError("unhandled geometry {}".format(geom.geom_type))
|
def redistribute_vertices(geom, dist):
"""
Redistribute the vertices on a projected LineString or MultiLineString. The distance
argument is only approximate since the total distance of the linestring may not be
a multiple of the preferred distance. This function works on only [Multi]LineString
geometry types.
This code is adapted from an answer by Mike T from this original question:
https://stackoverflow.com/questions/34906124/interpolating-every-x-distance-along-multiline-in-shapely
Parameters
----------
geom : LineString or MultiLineString
a Shapely geometry
dist : float
spacing length along edges. Units are the same as the geom; Degrees for unprojected geometries and meters
for projected geometries. The smaller the value, the more points are created.
Returns
-------
list of Point geometries : list
"""
if geom.geom_type == "LineString":
num_vert = int(round(geom.length / dist))
if num_vert == 0:
num_vert = 1
return [
geom.interpolate(float(n) / num_vert, normalized=True)
for n in range(num_vert + 1)
]
elif geom.geom_type == "MultiLineString":
parts = [redistribute_vertices(part, dist) for part in geom]
return type(geom)([p for p in parts if not p.is_empty])
else:
raise ValueError("unhandled geometry {}".format(geom.geom_type))
|
https://github.com/gboeing/osmnx/issues/9
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-2-0d3afc68e96a> in <module>()
----> 1 ox.plot_graph(ox.graph_from_place('Modena, Italy'))
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in graph_from_place(query, network_type, simplify, retain_all, truncate_by_edge, name, which_result, buffer_dist, timeout, memory, max_query_area_size, clean_periphery)
1799 if isinstance(query, str) or isinstance(query, dict):
1800 # if it is a string (place name) or dict (structured place query), then it is a single place
-> 1801 gdf_place = gdf_from_place(query, which_result=which_result, buffer_dist=buffer_dist)
1802 name = query
1803 elif isinstance(query, list):
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in gdf_from_place(query, gdf_name, which_result, buffer_dist)
522
523 # get the data from OSM
--> 524 data = osm_polygon_download(query, limit=which_result)
525 if len(data) >= which_result:
526
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in osm_polygon_download(query, limit, polygon_geojson, pause_duration)
495
496 # request the URL, return the JSON
--> 497 response_json = nominatim_request(params=params, timeout=30)
498 return response_json
499
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in nominatim_request(params, pause_duration, timeout, error_pause_duration)
373 start_time = time.time()
374 log('Requesting {} with timeout={}'.format(prepared_url, timeout))
--> 375 response = requests.get(url, params, timeout=timeout)
376
377 # get the response size and the domain, log result
TypeError: get() takes 1 positional argument but 2 were given
|
TypeError
|
def geocode(query):
"""
Geocode a query string to (lat, lon) with the Nominatim geocoder.
Parameters
----------
query : string
the query string to geocode
Returns
-------
point : tuple
the (lat, lon) coordinates returned by the geocoder
"""
# define the parameters
params = OrderedDict()
params["format"] = "json"
params["limit"] = 1
params["dedupe"] = (
0 # prevent OSM from deduping results so we get precisely 'limit' # of results
)
params["q"] = query
response_json = nominatim_request(params=params, timeout=30)
# if results were returned, parse lat and long out of the result
if (
len(response_json) > 0
and "lat" in response_json[0]
and "lon" in response_json[0]
):
lat = float(response_json[0]["lat"])
lon = float(response_json[0]["lon"])
point = (lat, lon)
log('Geocoded "{}" to {}'.format(query, point))
return point
else:
raise Exception(
'Nominatim geocoder returned no results for query "{}"'.format(query)
)
|
def geocode(query):
"""
Geocode a query string to (lat, lon) with the Nominatim geocoder.
Parameters
----------
query : string
the query string to geocode
Returns
-------
point : tuple
the (lat, lon) coordinates returned by the geocoder
"""
# send the query to the nominatim geocoder and parse the json response
url_template = "https://nominatim.openstreetmap.org/search?format=json&limit=1&q={}"
url = url_template.format(query)
response = requests.get(url, timeout=60)
results = response.json()
# if results were returned, parse lat and long out of the result
if len(results) > 0 and "lat" in results[0] and "lon" in results[0]:
lat = float(results[0]["lat"])
lon = float(results[0]["lon"])
point = (lat, lon)
log('Geocoded "{}" to {}'.format(query, point))
return point
else:
raise Exception(
'Nominatim geocoder returned no results for query "{}"'.format(query)
)
|
https://github.com/gboeing/osmnx/issues/9
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-2-0d3afc68e96a> in <module>()
----> 1 ox.plot_graph(ox.graph_from_place('Modena, Italy'))
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in graph_from_place(query, network_type, simplify, retain_all, truncate_by_edge, name, which_result, buffer_dist, timeout, memory, max_query_area_size, clean_periphery)
1799 if isinstance(query, str) or isinstance(query, dict):
1800 # if it is a string (place name) or dict (structured place query), then it is a single place
-> 1801 gdf_place = gdf_from_place(query, which_result=which_result, buffer_dist=buffer_dist)
1802 name = query
1803 elif isinstance(query, list):
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in gdf_from_place(query, gdf_name, which_result, buffer_dist)
522
523 # get the data from OSM
--> 524 data = osm_polygon_download(query, limit=which_result)
525 if len(data) >= which_result:
526
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in osm_polygon_download(query, limit, polygon_geojson, pause_duration)
495
496 # request the URL, return the JSON
--> 497 response_json = nominatim_request(params=params, timeout=30)
498 return response_json
499
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in nominatim_request(params, pause_duration, timeout, error_pause_duration)
373 start_time = time.time()
374 log('Requesting {} with timeout={}'.format(prepared_url, timeout))
--> 375 response = requests.get(url, params, timeout=timeout)
376
377 # get the response size and the domain, log result
TypeError: get() takes 1 positional argument but 2 were given
|
TypeError
|
def save_as_osm(
data,
node_tags=settings.osm_xml_node_tags,
node_attrs=settings.osm_xml_node_attrs,
edge_tags=settings.osm_xml_way_tags,
edge_attrs=settings.osm_xml_way_attrs,
oneway=False,
merge_edges=True,
edge_tag_aggs=None,
filename="graph.osm",
folder=None,
):
"""
Save a graph as an OSM XML formatted file. NOTE: for very large
networks this method can take upwards of 30+ minutes to finish.
Parameters
__________
data : networkx multi(di)graph OR a length 2 iterable of nodes/edges
geopandas.GeoDataFrames
filename : string
the name of the osm file (including file extension)
folder : string
the folder to contain the file, if None, use default data folder
node_attrs: list
osm node attributes to include in output OSM XML
edge_tags : list
osm way tags to include in output OSM XML
edge_attrs : list
osm way attributes to include in output OSM XML
oneway : bool
the default oneway value used to fill this tag where missing
merge_edges : bool
if True merges graph edges such that each OSM way has one entry
and one entry only in the OSM XML. Otherwise, every OSM way
will have a separate entry for each node pair it contains.
edge_tag_aggs : list of length-2 string tuples
useful only if merge_edges is True, this argument allows the user
to specify edge attributes to aggregate such that the merged
OSM way entry tags accurately represent the sum total of
their component edge attributes. For example, if the user
wants the OSM way to have a "length" attribute, the user must
specify `edge_tag_aggs=[('length', 'sum')]` in order to tell
this method to aggregate the lengths of the individual
component edges. Otherwise, the length attribute will simply
reflect the length of the first edge associated with the way.
Returns
-------
None
"""
start_time = time.time()
if folder is None:
folder = settings.data_folder
try:
assert settings.all_oneway
except AssertionError:
raise UserWarning(
"In order for ox.save_as_osm() to behave properly "
"the graph must have been created with the 'all_oneway' "
"setting set to True."
)
try:
gdf_nodes, gdf_edges = data
except ValueError:
gdf_nodes, gdf_edges = graph_to_gdfs(
data, node_geometry=False, fill_edge_geometry=False
)
# rename columns per osm specification
gdf_nodes.rename(columns={"osmid": "id", "x": "lon", "y": "lat"}, inplace=True)
if "id" in gdf_edges.columns:
gdf_edges = gdf_edges[[col for col in gdf_edges if col != "id"]]
if "uniqueid" in gdf_edges.columns:
gdf_edges = gdf_edges.rename(columns={"uniqueid": "id"})
else:
gdf_edges = gdf_edges.reset_index().rename(columns={"index": "id"})
# add default values for required attributes
for table in [gdf_nodes, gdf_edges]:
table["uid"] = "1"
table["user"] = "osmnx"
table["version"] = "1"
table["changeset"] = "1"
table["timestamp"] = "2017-01-01T00:00:00Z"
# convert all datatypes to str
nodes = gdf_nodes.applymap(str)
edges = gdf_edges.applymap(str)
# misc. string replacements to meet OSM XML spec
if "oneway" in edges.columns:
# fill blank oneway tags with default (False)
edges.loc[pd.isnull(edges["oneway"]), "oneway"] = oneway
edges.loc[:, "oneway"] = edges["oneway"].astype(str)
edges.loc[:, "oneway"] = (
edges["oneway"].str.replace("False", "no").replace("True", "yes")
)
# initialize XML tree with an OSM root element
root = etree.Element("osm", attrib={"version": "1", "generator": "OSMnx"})
# append nodes to the XML tree
for i, row in nodes.iterrows():
node = etree.SubElement(root, "node", attrib=row[node_attrs].dropna().to_dict())
for tag in node_tags:
if tag in nodes.columns:
etree.SubElement(node, "tag", attrib={"k": tag, "v": row[tag]})
# append edges to the XML tree
if merge_edges:
for e in edges["id"].unique():
all_way_edges = edges[edges["id"] == e]
first = all_way_edges.iloc[0]
edge = etree.SubElement(
root, "way", attrib=first[edge_attrs].dropna().to_dict()
)
if len(all_way_edges) == 1:
etree.SubElement(edge, "nd", attrib={"ref": first["u"]})
etree.SubElement(edge, "nd", attrib={"ref": first["v"]})
else:
# topological sort
ordered_nodes = get_unique_nodes_ordered_from_way(all_way_edges)
for node in ordered_nodes:
etree.SubElement(edge, "nd", attrib={"ref": node})
if edge_tag_aggs is None:
for tag in edge_tags:
if tag in all_way_edges.columns:
etree.SubElement(
edge, "tag", attrib={"k": tag, "v": first[tag]}
)
else:
for tag in edge_tags:
if tag in all_way_edges.columns:
if tag not in [t for t, agg in edge_tag_aggs]:
etree.SubElement(
edge, "tag", attrib={"k": tag, "v": first[tag]}
)
for tag, agg in edge_tag_aggs:
if tag in all_way_edges.columns:
etree.SubElement(
edge,
"tag",
attrib={"k": tag, "v": all_way_edges[tag].aggregate(agg)},
)
else:
# NOTE: this will generate separate OSM ways for each network edge,
# even if the edges are all part of the same original OSM way. As
# such, each way will be comprised of two nodes, and there will be
# many ways with the same OSM id. This does not conform to the
# OSM XML schema standard, however, the data will still comprise a
# valid network and will be readable by *most* OSM tools.
for i, row in edges.iterrows():
edge = etree.SubElement(
root, "way", attrib=row[edge_attrs].dropna().to_dict()
)
etree.SubElement(edge, "nd", attrib={"ref": row["u"]})
etree.SubElement(edge, "nd", attrib={"ref": row["v"]})
for tag in edge_tags:
if tag in edges.columns:
etree.SubElement(edge, "tag", attrib={"k": tag, "v": row[tag]})
et = etree.ElementTree(root)
if not os.path.exists(folder):
os.makedirs(folder)
et.write(os.path.join(folder, filename))
log(
'Saved graph to disk as OSM at "{}" in {:,.2f} seconds'.format(
os.path.join(folder, filename), time.time() - start_time
)
)
|
def save_as_osm(
data,
node_tags=settings.osm_xml_node_tags,
node_attrs=settings.osm_xml_node_attrs,
edge_tags=settings.osm_xml_way_tags,
edge_attrs=settings.osm_xml_way_attrs,
oneway=False,
merge_edges=True,
edge_tag_aggs=None,
filename="graph.osm",
folder=None,
):
"""
Save a graph as an OSM XML formatted file. NOTE: for very large
networks this method can take upwards of 30+ minutes to finish.
Parameters
__________
data : networkx multi(di)graph OR a length 2 iterable of nodes/edges
geopandas.GeoDataFrames
filename : string
the name of the osm file (including file extension)
folder : string
the folder to contain the file, if None, use default data folder
node_attrs: list
osm node attributes to include in output OSM XML
edge_tags : list
osm way tags to include in output OSM XML
edge_attrs : list
osm way attributes to include in output OSM XML
oneway : bool
the default oneway value used to fill this tag where missing
merge_edges : bool
if True merges graph edges such that each OSM way has one entry
and one entry only in the OSM XML. Otherwise, every OSM way
will have a separate entry for each node pair it contains.
edge_tag_aggs : list of length-2 string tuples
useful only if merge_edges is True, this argument allows the user
to specify edge attributes to aggregate such that the merged
OSM way entry tags accurately represent the sum total of
their component edge attributes. For example, if the user
wants the OSM way to have a "length" attribute, the user must
specify `edge_tag_aggs=[('length', 'sum')]` in order to tell
this method to aggregate the lengths of the individual
component edges. Otherwise, the length attribute will simply
reflect the length of the first edge associated with the way.
Returns
-------
None
"""
start_time = time.time()
if folder is None:
folder = settings.data_folder
try:
assert settings.all_oneway
except AssertionError:
raise UserWarning(
"In order for ox.save_as_osm() to behave properly "
"the graph must have been created with the 'all_oneway' "
"setting set to True."
)
try:
gdf_nodes, gdf_edges = data
except ValueError:
gdf_nodes, gdf_edges = graph_to_gdfs(
data, node_geometry=False, fill_edge_geometry=False
)
# rename columns per osm specification
gdf_nodes.rename(columns={"osmid": "id", "x": "lon", "y": "lat"}, inplace=True)
if "id" in gdf_edges.columns:
gdf_edges = gdf_edges[[col for col in gdf_edges if col != "id"]]
if "uniqueid" in gdf_edges.columns:
gdf_edges = gdf_edges.rename(columns={"uniqueid": "id"})
else:
gdf_edges = gdf_edges.reset_index().rename(columns={"index": "id"})
# add default values for required attributes
for table in [gdf_nodes, gdf_edges]:
table["uid"] = "1"
table["user"] = "osmnx"
table["version"] = "1"
table["changeset"] = "1"
table["timestamp"] = "2017-01-01T00:00:00Z"
# convert all datatypes to str
nodes = gdf_nodes.applymap(str)
edges = gdf_edges.applymap(str)
# misc. string replacements to meet OSM XML spec
if "oneway" in edges.columns:
# fill blank oneway tags with default (False)
edges.loc[pd.isnull(edges["oneway"]), "oneway"] = oneway
edges.loc[:, "oneway"] = edges["oneway"].astype(str)
edges.loc[:, "oneway"] = (
edges["oneway"].str.replace("False", "no").replace("True", "yes")
)
# initialize XML tree with an OSM root element
root = etree.Element("osm", attrib={"version": "1", "generator": "OSMnx"})
# append nodes to the XML tree
for i, row in nodes.iterrows():
node = etree.SubElement(root, "node", attrib=row[node_attrs].dropna().to_dict())
for tag in node_tags:
etree.SubElement(node, "tag", attrib={"k": tag, "v": row[tag]})
# append edges to the XML tree
if merge_edges:
for e in edges["id"].unique():
all_way_edges = edges[edges["id"] == e]
first = all_way_edges.iloc[0]
edge = etree.SubElement(
root, "way", attrib=first[edge_attrs].dropna().to_dict()
)
if len(all_way_edges) == 1:
etree.SubElement(edge, "nd", attrib={"ref": first["u"]})
etree.SubElement(edge, "nd", attrib={"ref": first["v"]})
else:
# topological sort
ordered_nodes = get_unique_nodes_ordered_from_way(all_way_edges)
for node in ordered_nodes:
etree.SubElement(edge, "nd", attrib={"ref": node})
if edge_tag_aggs is None:
for tag in edge_tags:
etree.SubElement(edge, "tag", attrib={"k": tag, "v": first[tag]})
else:
for tag in edge_tags:
if tag not in [t for t, agg in edge_tag_aggs]:
etree.SubElement(
edge, "tag", attrib={"k": tag, "v": first[tag]}
)
for tag, agg in edge_tag_aggs:
etree.SubElement(
edge,
"tag",
attrib={"k": tag, "v": all_way_edges[tag].aggregate(agg)},
)
else:
# NOTE: this will generate separate OSM ways for each network edge,
# even if the edges are all part of the same original OSM way. As
# such, each way will be comprised of two nodes, and there will be
# many ways with the same OSM id. This does not conform to the
# OSM XML schema standard, however, the data will still comprise a
# valid network and will be readable by *most* OSM tools.
for i, row in edges.iterrows():
edge = etree.SubElement(
root, "way", attrib=row[edge_attrs].dropna().to_dict()
)
etree.SubElement(edge, "nd", attrib={"ref": row["u"]})
etree.SubElement(edge, "nd", attrib={"ref": row["v"]})
for tag in edge_tags:
etree.SubElement(edge, "tag", attrib={"k": tag, "v": row[tag]})
et = etree.ElementTree(root)
if not os.path.exists(folder):
os.makedirs(folder)
et.write(os.path.join(folder, filename))
log(
'Saved graph to disk as OSM at "{}" in {:,.2f} seconds'.format(
os.path.join(folder, filename), time.time() - start_time
)
)
|
https://github.com/gboeing/osmnx/issues/9
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-2-0d3afc68e96a> in <module>()
----> 1 ox.plot_graph(ox.graph_from_place('Modena, Italy'))
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in graph_from_place(query, network_type, simplify, retain_all, truncate_by_edge, name, which_result, buffer_dist, timeout, memory, max_query_area_size, clean_periphery)
1799 if isinstance(query, str) or isinstance(query, dict):
1800 # if it is a string (place name) or dict (structured place query), then it is a single place
-> 1801 gdf_place = gdf_from_place(query, which_result=which_result, buffer_dist=buffer_dist)
1802 name = query
1803 elif isinstance(query, list):
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in gdf_from_place(query, gdf_name, which_result, buffer_dist)
522
523 # get the data from OSM
--> 524 data = osm_polygon_download(query, limit=which_result)
525 if len(data) >= which_result:
526
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in osm_polygon_download(query, limit, polygon_geojson, pause_duration)
495
496 # request the URL, return the JSON
--> 497 response_json = nominatim_request(params=params, timeout=30)
498 return response_json
499
/usr/local/lib/python3.4/dist-packages/osmnx/osmnx.py in nominatim_request(params, pause_duration, timeout, error_pause_duration)
373 start_time = time.time()
374 log('Requesting {} with timeout={}'.format(prepared_url, timeout))
--> 375 response = requests.get(url, params, timeout=timeout)
376
377 # get the response size and the domain, log result
TypeError: get() takes 1 positional argument but 2 were given
|
TypeError
|
def getFileInfo(self, name, pathname):
try:
audio = None
size = os.stat(pathname).st_size
if size > 0:
try:
audio = taglib.File(pathname)
except IOError:
pass
if audio is not None:
bitrateinfo = (
int(audio.bitrate),
False,
) # Second argument used to be VBR (variable bitrate)
fileinfo = (name, size, bitrateinfo, int(audio.length))
else:
fileinfo = (name, size, None, None)
return fileinfo
except Exception as errtuple:
message = _("Error while scanning file %(path)s: %(error)s") % {
"path": pathname,
"error": errtuple,
}
self.logMessage(message)
displayTraceback(sys.exc_info()[2])
|
def getFileInfo(self, name, pathname):
try:
audio = None
size = os.stat(pathname).st_size
if size > 0:
try:
audio = taglib.File(pathname)
except IOError:
pass
if audio is not None and audio.length > 0:
fileinfo = (name, size, int(audio.bitrate), int(audio.length))
else:
fileinfo = (name, size, None, None)
return fileinfo
except Exception as errtuple:
message = _("Error while scanning file %(path)s: %(error)s") % {
"path": pathname,
"error": errtuple,
}
self.logMessage(message)
displayTraceback(sys.exc_info()[2])
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/527
|
Exception in thread Thread-1:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/threading.py", line 926, in _bootstrap_inner
self.run()
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskproto.py", line 552, in run
conns, connsinprogress, server_socket = self.process_queue(queue, conns, connsinprogress, server_socket)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskproto.py", line 1134, in process_queue
msg = msgObj.makeNetworkMessage()
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1877, in makeNetworkMessage
self.packObject(1) +
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 277, in packObject
return struct.pack("<I", object.value)
struct.error: required argument is not an integer
|
struct.error
|
def getByteStream(self, fileinfo):
message = slskmessages.SlskMessage()
stream = bytearray()
stream.extend(
bytes([1])
+ message.packObject(fileinfo[0])
+ message.packObject(NetworkLongLongType(fileinfo[1]))
)
if fileinfo[2] is not None:
try:
msgbytes = bytearray()
msgbytes.extend(message.packObject("mp3") + message.packObject(3))
msgbytes.extend(
message.packObject(0)
+ message.packObject(NetworkIntType(fileinfo[2][0]))
+ message.packObject(1)
+ message.packObject(NetworkIntType(fileinfo[3]))
+ message.packObject(2)
+ message.packObject(NetworkIntType(fileinfo[2][1]))
)
stream.extend(msgbytes)
except Exception:
log.addwarning(
_(
"Found meta data that couldn't be encoded, possible corrupt file: '%(file)s' has a bitrate of %(bitrate)s kbs, a length of %(length)s seconds and a VBR of %(vbr)s"
% {
"file": fileinfo[0],
"bitrate": fileinfo[2][0],
"length": fileinfo[3],
"vbr": fileinfo[2][1],
}
)
)
stream.extend(message.packObject("") + message.packObject(0))
else:
stream.extend(message.packObject("") + message.packObject(0))
return stream
|
def getByteStream(self, fileinfo):
message = slskmessages.SlskMessage()
stream = bytearray()
stream.extend(
bytes([1])
+ message.packObject(fileinfo[0])
+ message.packObject(NetworkLongLongType(fileinfo[1]))
)
if fileinfo[2] is not None:
try:
msgbytes = bytearray()
msgbytes.extend(message.packObject("mp3") + message.packObject(2))
msgbytes.extend(
message.packObject(0)
+ message.packObject(NetworkIntType(fileinfo[2]))
+ message.packObject(1)
+ message.packObject(NetworkIntType(fileinfo[3]))
)
stream.extend(msgbytes)
except Exception:
log.addwarning(
_(
"Found meta data that couldn't be encoded, possible corrupt file: '%(file)s' has a bitrate of %(bitrate)s kbs and a length of %(length)s seconds"
% {
"file": fileinfo[0],
"bitrate": fileinfo[2],
"length": fileinfo[3],
}
)
)
stream.extend(message.packObject("") + message.packObject(0))
else:
stream.extend(message.packObject("") + message.packObject(0))
return stream
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/527
|
Exception in thread Thread-1:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/threading.py", line 926, in _bootstrap_inner
self.run()
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskproto.py", line 552, in run
conns, connsinprogress, server_socket = self.process_queue(queue, conns, connsinprogress, server_socket)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskproto.py", line 1134, in process_queue
msg = msgObj.makeNetworkMessage()
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1877, in makeNetworkMessage
self.packObject(1) +
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 277, in packObject
return struct.pack("<I", object.value)
struct.error: required argument is not an integer
|
struct.error
|
def makeNetworkMessage(self):
filelist = []
for i in self.list:
try:
filelist.append(self.fileindex[str(i)])
except Exception:
pass
queuesize = self.inqueue[0]
msg = bytearray()
msg.extend(
self.packObject(self.user)
+ self.packObject(NetworkIntType(self.token))
+ self.packObject(NetworkIntType(len(filelist)))
)
for i in filelist:
msg.extend(
bytes([1])
+ self.packObject(i[0].replace(os.sep, "\\"))
+ self.packObject(NetworkLongLongType(i[1]))
)
if i[2] is None:
# No metadata
msg.extend(self.packObject("") + self.packObject(0))
else:
# FileExtension, NumAttributes,
msg.extend(self.packObject("mp3") + self.packObject(3))
msg.extend(
self.packObject(0)
+ self.packObject(NetworkIntType(i[2][0]))
+ self.packObject(1)
+ self.packObject(NetworkIntType(i[3]))
+ self.packObject(2)
+ self.packObject(i[2][1])
)
msg.extend(
bytes([self.freeulslots])
+ self.packObject(NetworkIntType(self.ulspeed))
+ self.packObject(NetworkIntType(queuesize))
)
return zlib.compress(msg)
|
def makeNetworkMessage(self):
filelist = []
for i in self.list:
try:
filelist.append(self.fileindex[str(i)])
except Exception:
pass
queuesize = self.inqueue[0]
msg = bytearray()
msg.extend(
self.packObject(self.user)
+ self.packObject(NetworkIntType(self.token))
+ self.packObject(NetworkIntType(len(filelist)))
)
for i in filelist:
msg.extend(
bytes([1])
+ self.packObject(i[0].replace(os.sep, "\\"))
+ self.packObject(NetworkLongLongType(i[1]))
)
if i[2] is None:
# No metadata
msg.extend(self.packObject("") + self.packObject(0))
else:
# FileExtension, NumAttributes,
msg.extend(self.packObject("mp3") + self.packObject(2))
msg.extend(
self.packObject(0)
+ self.packObject(NetworkIntType(i[2]))
+ self.packObject(1)
+ self.packObject(NetworkIntType(i[3]))
)
msg.extend(
bytes([self.freeulslots])
+ self.packObject(NetworkIntType(self.ulspeed))
+ self.packObject(NetworkIntType(queuesize))
)
return zlib.compress(msg)
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/527
|
Exception in thread Thread-1:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/threading.py", line 926, in _bootstrap_inner
self.run()
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskproto.py", line 552, in run
conns, connsinprogress, server_socket = self.process_queue(queue, conns, connsinprogress, server_socket)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskproto.py", line 1134, in process_queue
msg = msgObj.makeNetworkMessage()
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1877, in makeNetworkMessage
self.packObject(1) +
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 277, in packObject
return struct.pack("<I", object.value)
struct.error: required argument is not an integer
|
struct.error
|
def makeNetworkMessage(self):
msg = (
self.packObject(1)
+ self.packObject(self.dir)
+ self.packObject(1)
+ self.packObject(self.dir)
+ self.packObject(len(self.list))
)
for i in self.list:
msg = (
msg
+ bytes([1])
+ self.packObject(i[0])
+ self.packObject(i[1])
+ self.packObject(0)
)
if i[2] is None:
msg = msg + self.packObject("") + self.packObject(0)
else:
msg = msg + self.packObject("mp3") + self.packObject(3)
msg = (
msg
+ self.packObject(0)
+ self.packObject(i[2][0])
+ self.packObject(1)
+ self.packObject(i[3])
+ self.packObject(2)
+ self.packObject(i[2][1])
)
return zlib.compress(msg)
|
def makeNetworkMessage(self):
msg = (
self.packObject(1)
+ self.packObject(self.dir)
+ self.packObject(1)
+ self.packObject(self.dir)
+ self.packObject(len(self.list))
)
for i in self.list:
msg = (
msg
+ bytes([1])
+ self.packObject(i[0])
+ self.packObject(i[1])
+ self.packObject(0)
)
if i[2] is None:
msg = msg + self.packObject("") + self.packObject(0)
else:
msg = msg + self.packObject("mp3") + self.packObject(2)
msg = (
msg
+ self.packObject(0)
+ self.packObject(i[2])
+ self.packObject(1)
+ self.packObject(i[3])
)
return zlib.compress(msg)
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/527
|
Exception in thread Thread-1:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/threading.py", line 926, in _bootstrap_inner
self.run()
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskproto.py", line 552, in run
conns, connsinprogress, server_socket = self.process_queue(queue, conns, connsinprogress, server_socket)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskproto.py", line 1134, in process_queue
msg = msgObj.makeNetworkMessage()
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1877, in makeNetworkMessage
self.packObject(1) +
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 277, in packObject
return struct.pack("<I", object.value)
struct.error: required argument is not an integer
|
struct.error
|
def GetResultBitrateLength(filesize, attributes):
"""Used to get the audio bitrate and length of search results and
user browse files"""
h_bitrate = ""
h_length = ""
bitrate = 0
# If there are 3 entries in the attribute list
if len(attributes) == 3:
a = attributes
# Sometimes the vbr indicator is in third position
if a[2] == 0 or a[2] == 1:
if a[2] == 1:
h_bitrate = " (vbr)"
bitrate = a[0]
h_bitrate = str(bitrate) + h_bitrate
h_length = "%i:%02i" % (a[1] / 60, a[1] % 60)
# Sometimes the vbr indicator is in second position
elif a[1] == 0 or a[1] == 1:
if a[1] == 1:
h_bitrate = " (vbr)"
bitrate = a[0]
h_bitrate = str(bitrate) + h_bitrate
h_length = "%i:%02i" % (a[2] / 60, a[2] % 60)
# Lossless audio, length is in first position
elif a[2] > 1:
# Bitrate = sample rate (Hz) * word length (bits) * channel count
# Bitrate = 44100 * 16 * 2
bitrate = (a[1] * a[2] * 2) / 1000
h_bitrate = str(bitrate)
h_length = "%i:%02i" % (a[0] / 60, a[0] % 60)
else:
bitrate = a[0]
h_bitrate = str(bitrate) + h_bitrate
# If there are 2 entries in the attribute list
elif len(attributes) == 2:
a = attributes
# Sometimes the vbr indicator is in second position
if a[1] == 0 or a[1] == 1:
# If it's a vbr file we can't deduce the length
if a[1] == 1:
h_bitrate = " (vbr)"
bitrate = a[0]
h_bitrate = str(bitrate) + h_bitrate
# If it's a constant bitrate we can deduce the length
else:
bitrate = a[0]
h_bitrate = str(bitrate) + h_bitrate
# Dividing the file size by the bitrate in Bytes should give us a good enough approximation
length = filesize / (bitrate / 8 * 1000)
h_length = "%i:%02i" % (length / 60, length % 60)
# Sometimes the bitrate is in first position and the length in second position
else:
bitrate = a[0]
h_bitrate = str(bitrate) + h_bitrate
h_length = "%i:%02i" % (a[1] / 60, a[1] % 60)
return h_bitrate, bitrate, h_length
|
def GetResultBitrateLength(filesize, attributes):
"""Used to get the audio bitrate and length of search results and
user browse files"""
h_bitrate = ""
h_length = ""
bitrate = 0
# If there are 3 entries in the attribute list
if len(attributes) == 3:
a = attributes
# Sometimes the vbr indicator is in third position
if a[2] == 0 or a[2] == 1:
if a[2] == 1:
h_bitrate = " (vbr)"
bitrate = a[0]
h_bitrate = str(bitrate) + h_bitrate
h_length = "%i:%02i" % (a[1] / 60, a[1] % 60)
# Sometimes the vbr indicator is in second position
elif a[1] == 0 or a[1] == 1:
if a[1] == 1:
h_bitrate = " (vbr)"
bitrate = a[0]
h_bitrate = str(bitrate) + h_bitrate
h_length = "%i:%02i" % (a[2] / 60, a[2] % 60)
# Lossless audio, length is in first position
elif a[2] > 1:
# Bitrate = sample rate (Hz) * word length (bits) * channel count
# Bitrate = 44100 * 16 * 2
bitrate = (a[1] * a[2] * 2) / 1000
h_bitrate = str(bitrate)
h_length = "%i:%02i" % (a[0] / 60, a[0] % 60)
else:
bitrate = a[0]
h_bitrate = str(bitrate) + h_bitrate
# If there are 2 entries in the attribute list
elif len(attributes) == 2:
a = attributes
# Sometimes the vbr indicator is in second position
if a[0] > 0 and (a[1] == 0 or a[1] == 1):
# If it's a vbr file we can't deduce the length
if a[1] == 1:
h_bitrate = " (vbr)"
bitrate = a[0]
h_bitrate = str(bitrate) + h_bitrate
# If it's a constant bitrate we can deduce the length
else:
bitrate = a[0]
h_bitrate = str(bitrate) + h_bitrate
# Dividing the file size by the bitrate in Bytes should give us a good enough approximation
length = filesize / (bitrate / 8 * 1000)
h_length = "%i:%02i" % (length / 60, length % 60)
# Sometimes the bitrate is in first position and the length in second position
else:
bitrate = a[0]
h_bitrate = str(bitrate) + h_bitrate
h_length = "%i:%02i" % (a[1] / 60, a[1] % 60)
return h_bitrate, bitrate, h_length
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/527
|
Exception in thread Thread-1:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/threading.py", line 926, in _bootstrap_inner
self.run()
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskproto.py", line 552, in run
conns, connsinprogress, server_socket = self.process_queue(queue, conns, connsinprogress, server_socket)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskproto.py", line 1134, in process_queue
msg = msgObj.makeNetworkMessage()
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1877, in makeNetworkMessage
self.packObject(1) +
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 277, in packObject
return struct.pack("<I", object.value)
struct.error: required argument is not an integer
|
struct.error
|
def setBuddyShares(self, files, streams, wordindex, fileindex, mtimes):
storable_objects = [
(files, "bsharedfiles", "buddyfiles.db"),
(streams, "bsharedfilesstreams", "buddystreams.db"),
(mtimes, "bsharedmtimes", "buddymtimes.db"),
(wordindex, "bwordindex", "buddywordindex.db"),
(fileindex, "bfileindex", "buddyfileindex.db"),
]
self.storeObjects(storable_objects)
|
def setBuddyShares(self, files, streams, wordindex, fileindex, mtimes):
storable_objects = [
(files, "bsharedfiles", "buddyfiles.db"),
(streams, "bsharedfilesstreams", "buddystreams.db"),
(mtimes, "bsharedmtimes", "buddymtimes.db"),
(wordindex, "bwordindex", "buddywordindex.db"),
(fileindex, "bfileindex", "buddyfileindex.db"),
]
self.config_lock.acquire()
_thread.start_new_thread(self._storeObjects, (storable_objects,))
self.config_lock.release()
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def setShares(self, files, streams, wordindex, fileindex, mtimes):
storable_objects = [
(files, "sharedfiles", "files.db"),
(streams, "sharedfilesstreams", "streams.db"),
(mtimes, "sharedmtimes", "mtimes.db"),
(wordindex, "wordindex", "wordindex.db"),
(fileindex, "fileindex", "fileindex.db"),
]
self.storeObjects(storable_objects)
|
def setShares(self, files, streams, wordindex, fileindex, mtimes):
storable_objects = [
(files, "sharedfiles", "files.db"),
(streams, "sharedfilesstreams", "streams.db"),
(mtimes, "sharedmtimes", "mtimes.db"),
(wordindex, "wordindex", "wordindex.db"),
(fileindex, "fileindex", "fileindex.db"),
]
self.config_lock.acquire()
_thread.start_new_thread(self._storeObjects, (storable_objects,))
self.config_lock.release()
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def _BuddyRescanFinished(self, files, streams, wordindex, fileindex, mtimes):
self.np.config.setBuddyShares(files, streams, wordindex, fileindex, mtimes)
if self.np.config.sections["transfers"]["enablebuddyshares"]:
self.rescan_buddy.set_sensitive(True)
self.browse_buddy_shares.set_sensitive(True)
if self.np.transfers is not None:
self.np.shares.sendNumSharedFoldersFiles()
self.brescanning = 0
self.logMessage(_("Rescanning Buddy Shares finished"))
self.BuddySharesProgress.hide()
self.np.shares.CompressShares("buddy")
|
def _BuddyRescanFinished(self, files, streams, wordindex, fileindex, mtimes):
self.np.config.setBuddyShares(files, streams, wordindex, fileindex, mtimes)
self.np.config.writeShares()
if self.np.config.sections["transfers"]["enablebuddyshares"]:
self.rescan_buddy.set_sensitive(True)
self.browse_buddy_shares.set_sensitive(True)
if self.np.transfers is not None:
self.np.shares.sendNumSharedFoldersFiles()
self.brescanning = 0
self.logMessage(_("Rescanning Buddy Shares finished"))
self.BuddySharesProgress.hide()
self.np.shares.CompressShares("buddy")
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def _RescanFinished(self, files, streams, wordindex, fileindex, mtimes):
self.np.config.setShares(files, streams, wordindex, fileindex, mtimes)
if self.np.config.sections["transfers"]["shared"]:
self.rescan_public.set_sensitive(True)
self.browse_public_shares.set_sensitive(True)
self.rescanning = 0
self.logMessage(_("Rescanning finished"))
self.SharesProgress.hide()
self.np.shares.CompressShares("normal")
if self.np.transfers is not None:
self.np.shares.sendNumSharedFoldersFiles()
|
def _RescanFinished(self, files, streams, wordindex, fileindex, mtimes):
self.np.config.setShares(files, streams, wordindex, fileindex, mtimes)
self.np.config.writeShares()
if self.np.config.sections["transfers"]["shared"]:
self.rescan_public.set_sensitive(True)
self.browse_public_shares.set_sensitive(True)
if self.np.transfers is not None:
self.np.shares.sendNumSharedFoldersFiles()
self.rescanning = 0
self.logMessage(_("Rescanning finished"))
self.SharesProgress.hide()
self.np.shares.CompressShares("normal")
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def sendNumSharedFoldersFiles(self):
"""
Send number of files in buddy shares if only buddies can
download, and buddy-shares are enabled.
"""
conf = self.config.sections
if conf["transfers"]["enablebuddyshares"] and conf["transfers"]["friendsonly"]:
shared_db = "bsharedfiles"
index_db = "bfileindex"
else:
shared_db = "sharedfiles"
index_db = "fileindex"
sharedfolders = len(conf["transfers"][shared_db])
sharedfiles = len(conf["transfers"][index_db])
self.queue.put(slskmessages.SharedFoldersFiles(sharedfolders, sharedfiles))
|
def sendNumSharedFoldersFiles(self):
"""
Send number of files in buddy shares if only buddies can
download, and buddy-shares are enabled.
"""
conf = self.config.sections
if conf["transfers"]["enablebuddyshares"] and conf["transfers"]["friendsonly"]:
shared_db = "bsharedfiles"
else:
shared_db = "sharedfiles"
sharedfolders = len(conf["transfers"][shared_db])
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
self.queue.put(slskmessages.SharedFoldersFiles(sharedfolders, sharedfiles))
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def RebuildShares(self, msg):
self._RescanShares(msg, "normal", rebuild=True)
|
def RebuildShares(self, msg):
self.RescanShares(msg, rebuild=True)
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def RescanShares(self, msg, rebuild=False):
self._RescanShares(msg, "normal", rebuild)
|
def RescanShares(self, msg, rebuild=False):
try:
files, streams, wordindex, fileindex, mtimes = self.rescandirs(
msg.shared,
self.config.sections["transfers"]["sharedmtimes"],
self.config.sections["transfers"]["sharedfiles"],
self.config.sections["transfers"]["sharedfilesstreams"],
msg.yieldfunction,
self.np.frame.SharesProgress,
name=_("Shares"),
rebuild=rebuild,
)
time.sleep(0.5)
self.np.frame.RescanFinished(
files, streams, wordindex, fileindex, mtimes, "normal"
)
except Exception as ex:
config_dir, data_dir = GetUserDirectories()
log.addwarning(
_(
"Failed to rebuild share, serious error occurred. If this problem persists delete %s/*.db and try again. If that doesn't help please file a bug report with the stack trace included (see terminal output after this message). Technical details: %s"
)
% (data_dir, ex)
)
raise
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def RebuildBuddyShares(self, msg):
self._RescanShares(msg, "buddy", rebuild=True)
|
def RebuildBuddyShares(self, msg):
self.RescanBuddyShares(msg, rebuild=True)
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def RescanBuddyShares(self, msg, rebuild=False):
self._RescanShares(msg, "buddy", rebuild)
|
def RescanBuddyShares(self, msg, rebuild=False):
files, streams, wordindex, fileindex, mtimes = self.rescandirs(
msg.shared,
self.config.sections["transfers"]["bsharedmtimes"],
self.config.sections["transfers"]["bsharedfiles"],
self.config.sections["transfers"]["bsharedfilesstreams"],
msg.yieldfunction,
self.np.frame.BuddySharesProgress,
name=_("Buddy Shares"),
rebuild=rebuild,
)
time.sleep(0.5)
self.np.frame.RescanFinished(files, streams, wordindex, fileindex, mtimes, "buddy")
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def CompressShares(self, sharestype):
if sharestype == "normal":
streams = self.config.sections["transfers"]["sharedfilesstreams"]
elif sharestype == "buddy":
streams = self.config.sections["transfers"]["bsharedfilesstreams"]
if streams is None:
message = _("ERROR: No %(type)s shares database available") % {
"type": sharestype
}
print(message)
self.logMessage(message, None)
return
m = slskmessages.SharedFileList(None, streams)
_thread.start_new_thread(m.makeNetworkMessage, (0, True))
if sharestype == "normal":
self.CompressedSharesNormal = m
elif sharestype == "buddy":
self.CompressedSharesBuddy = m
|
def CompressShares(self, sharestype):
if sharestype == "normal":
streams = self.config.sections["transfers"]["sharedfilesstreams"]
elif sharestype == "buddy":
streams = self.config.sections["transfers"]["bsharedfilesstreams"]
if streams is None:
message = _("ERROR: No %(type)s shares database available") % {
"type": sharestype
}
print(message)
self.logMessage(message, None)
return
m = slskmessages.SharedFileList(None, streams)
m.makeNetworkMessage(nozlib=0, rebuild=True)
if sharestype == "normal":
self.CompressedSharesNormal = m
elif sharestype == "buddy":
self.CompressedSharesBuddy = m
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def processSearchRequest(self, searchterm, user, searchid, direct=0):
if not self.config.sections["searches"]["search_results"]:
# Don't return _any_ results when this option is disabled
return
if searchterm is None:
return
if user == self.config.sections["server"]["login"]:
# We shouldn't send a search response if we initiated the search request
return
checkuser, reason = self.np.CheckUser(user, None)
if not checkuser:
return
if reason == "geoip":
geoip = 1
else:
geoip = 0
maxresults = self.config.sections["searches"]["maxresults"]
if checkuser == 2:
wordindex = self.config.sections["transfers"]["bwordindex"]
fileindex = self.config.sections["transfers"]["bfileindex"]
else:
wordindex = self.config.sections["transfers"]["wordindex"]
fileindex = self.config.sections["transfers"]["fileindex"]
fifoqueue = self.config.sections["transfers"]["fifoqueue"]
if maxresults == 0:
return
terms = searchterm.translate(self.translatepunctuation).lower().split()
try:
list = [wordindex[i][:] for i in terms if i in wordindex]
except ValueError:
# Shelf is probably closed, perhaps when rescanning share
return
if len(list) != len(terms) or len(list) == 0:
return
min = list[0]
for i in list[1:]:
if len(i) < len(min):
min = i
list.remove(min)
for i in min[:]:
for j in list:
if i not in j:
min.remove(i)
break
results = min[:maxresults]
if len(results) > 0 and self.np.transfers is not None:
queuesizes = self.np.transfers.getUploadQueueSizes()
slotsavail = self.np.transfers.allowNewUploads()
if len(results) > 0:
message = slskmessages.FileSearchResult(
None,
self.config.sections["server"]["login"],
geoip,
searchid,
results,
fileindex,
slotsavail,
self.np.speed,
queuesizes,
fifoqueue,
)
self.np.ProcessRequestToPeer(user, message)
if direct:
self.logMessage(
_(
"User %(user)s is directly searching for %(query)s, returning %(num)i results"
)
% {"user": user, "query": searchterm, "num": len(results)},
2,
)
else:
self.logMessage(
_(
"User %(user)s is searching for %(query)s, returning %(num)i results"
)
% {"user": user, "query": searchterm, "num": len(results)},
2,
)
|
def processSearchRequest(self, searchterm, user, searchid, direct=0):
if not self.config.sections["searches"]["search_results"]:
# Don't return _any_ results when this option is disabled
return
if searchterm is None:
return
if user == self.config.sections["server"]["login"]:
# We shouldn't send a search response if we initiated the search request
return
checkuser, reason = self.np.CheckUser(user, None)
if not checkuser:
return
if reason == "geoip":
geoip = 1
else:
geoip = 0
maxresults = self.config.sections["searches"]["maxresults"]
if checkuser == 2:
wordindex = self.config.sections["transfers"]["bwordindex"]
fileindex = self.config.sections["transfers"]["bfileindex"]
else:
wordindex = self.config.sections["transfers"]["wordindex"]
fileindex = self.config.sections["transfers"]["fileindex"]
fifoqueue = self.config.sections["transfers"]["fifoqueue"]
if maxresults == 0:
return
terms = searchterm.translate(self.translatepunctuation).lower().split()
list = [wordindex[i][:] for i in terms if i in wordindex]
if len(list) != len(terms) or len(list) == 0:
return
min = list[0]
for i in list[1:]:
if len(i) < len(min):
min = i
list.remove(min)
for i in min[:]:
for j in list:
if i not in j:
min.remove(i)
break
results = min[:maxresults]
if len(results) > 0 and self.np.transfers is not None:
queuesizes = self.np.transfers.getUploadQueueSizes()
slotsavail = self.np.transfers.allowNewUploads()
if len(results) > 0:
message = slskmessages.FileSearchResult(
None,
self.config.sections["server"]["login"],
geoip,
searchid,
results,
fileindex,
slotsavail,
self.np.speed,
queuesizes,
fifoqueue,
)
self.np.ProcessRequestToPeer(user, message)
if direct:
self.logMessage(
_(
"User %(user)s is directly searching for %(query)s, returning %(num)i results"
)
% {"user": user, "query": searchterm, "num": len(results)},
2,
)
else:
self.logMessage(
_(
"User %(user)s is searching for %(query)s, returning %(num)i results"
)
% {"user": user, "query": searchterm, "num": len(results)},
2,
)
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def rescandirs(
self,
shared,
oldmtimes,
oldfiles,
sharedfilesstreams,
yieldfunction,
progress=None,
name="",
rebuild=False,
):
"""
Check for modified or new files via OS's last mtime on a directory,
or, if rebuild is True, all directories
"""
GLib.idle_add(progress.set_fraction, 0.0)
GLib.idle_add(progress.show)
# returns dict in format: { Directory : mtime, ... }
shared_directories = [x[1] for x in shared]
self.logMessage(
_("%(num)s directories found before rescan, rebuilding...")
% {"num": len(oldmtimes)}
)
newmtimes = self.getDirsMtimes(shared_directories, yieldfunction)
# Get list of files
# returns dict in format { Directory : { File : metadata, ... }, ... }
newsharedfiles = self.getFilesList(
newmtimes, oldmtimes, oldfiles, yieldfunction, progress, rebuild
)
# Pack shares data
# returns dict in format { Directory : hex string of files+metadata, ... }
newsharedfilesstreams = self.getFilesStreams(
newmtimes, oldmtimes, sharedfilesstreams, newsharedfiles, rebuild, yieldfunction
)
# Update Search Index
# newwordindex is a dict in format {word: [num, num, ..], ... } with num matching
# keys in newfileindex
# newfileindex is a dict in format { num: (path, size, (bitrate, vbr), length), ... }
newwordindex, newfileindex = self.getFilesIndex(
newmtimes, oldmtimes, newsharedfiles, yieldfunction, progress
)
self.logMessage(
_("%(num)s directories found after rescan") % {"num": len(newmtimes)}
)
return newsharedfiles, newsharedfilesstreams, newwordindex, newfileindex, newmtimes
|
def rescandirs(
self,
shared,
oldmtimes,
oldfiles,
sharedfilesstreams,
yieldfunction,
progress=None,
name="",
rebuild=False,
):
"""
Check for modified or new files via OS's last mtime on a directory,
or, if rebuild is True, all directories
"""
# returns dict in format: { Directory : mtime, ... }
shared_directories = [x[1] for x in shared]
GLib.idle_add(progress.set_text, _("Checking for changes"))
GLib.idle_add(progress.show)
GLib.idle_add(progress.set_fraction, 0)
self.logMessage(
_("%(num)s directories found before rescan, rebuilding...")
% {"num": len(oldmtimes)}
)
newmtimes = self.getDirsMtimes(shared_directories, yieldfunction)
GLib.idle_add(progress.set_text, _("Scanning %s") % name)
# Get list of files
# returns dict in format { Directory : { File : metadata, ... }, ... }
newsharedfiles = self.getFilesList(
newmtimes, oldmtimes, oldfiles, yieldfunction, progress, rebuild
)
# Pack shares data
# returns dict in format { Directory : hex string of files+metadata, ... }
GLib.idle_add(progress.set_text, _("Building Database"))
newsharedfilesstreams = self.getFilesStreams(
newmtimes, oldmtimes, sharedfilesstreams, newsharedfiles, rebuild, yieldfunction
)
# Update Search Index
# newwordindex is a dict in format {word: [num, num, ..], ... } with num matching
# keys in newfileindex
# newfileindex is a dict in format { num: (path, size, (bitrate, vbr), length), ... }
GLib.idle_add(progress.set_text, _("Building Index"))
GLib.idle_add(progress.set_fraction, 0.0)
newwordindex, newfileindex = self.getFilesIndex(
newmtimes,
oldmtimes,
shared_directories,
newsharedfiles,
yieldfunction,
progress,
)
GLib.idle_add(progress.set_fraction, 1.0)
self.logMessage(
_("%(num)s directories found after rescan") % {"num": len(newmtimes)}
)
return newsharedfiles, newsharedfilesstreams, newwordindex, newfileindex, newmtimes
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def getDirsMtimes(self, dirs, yieldcall=None):
list = {}
for folder in dirs:
try:
if self.hiddenCheck({"dir": folder}):
continue
mtime = os.path.getmtime(folder)
list[folder] = mtime
for entry in os.scandir(folder):
if entry.is_dir():
path = entry.path
try:
mtime = entry.stat().st_mtime
except OSError as errtuple:
message = _("Scanning Error: %(error)s Path: %(path)s") % {
"error": errtuple,
"path": path,
}
print(str(message))
self.logMessage(message)
continue
list[path] = mtime
dircontents = self.getDirsMtimes([path])
for k in dircontents:
list[k] = dircontents[k]
if yieldcall is not None:
yieldcall()
except OSError as errtuple:
message = _("Scanning Directory Error: %(error)s Path: %(path)s") % {
"error": errtuple,
"path": folder,
}
print(str(message))
self.logMessage(message)
continue
return list
|
def getDirsMtimes(self, dirs, yieldcall=None):
list = {}
for folder in dirs:
folder = os.path.expanduser(folder.replace("//", "/"))
if self.hiddenCheck({"dir": folder}):
continue
try:
mtime = os.path.getmtime(folder)
except OSError as errtuple:
message = _("Scanning Directory Error: %(error)s Path: %(path)s") % {
"error": errtuple,
"path": folder,
}
print(str(message))
self.logMessage(message)
continue
list[folder] = mtime
try:
for entry in os.scandir(folder):
if entry.is_dir():
path = os.path.join(folder, entry.path.split("/")[-1])
try:
mtime = os.path.getmtime(path)
except OSError as errtuple:
islink = False
try:
islink = os.path.islink(path)
except OSError as errtuple2:
print(errtuple2)
if islink:
message = _(
'Scanning Error: Broken link to directory: "%(link)s" from Path: "%(path)s". Repair or remove this link.'
) % {"link": os.readlink(path), "path": path}
else:
message = _("Scanning Error: %(error)s Path: %(path)s") % {
"error": errtuple,
"path": path,
}
print(str(message))
self.logMessage(message)
continue
list[path] = mtime
dircontents = self.getDirsMtimes([path])
for k in dircontents:
list[k] = dircontents[k]
if yieldcall is not None:
yieldcall()
except OSError as errtuple:
message = _("Scanning Directory Error: %(error)s Path: %(path)s") % {
"error": errtuple,
"path": folder,
}
print(str(message))
self.logMessage(message)
continue
return list
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def getFilesList(
self, mtimes, oldmtimes, oldlist, yieldcall=None, progress=None, rebuild=False
):
"""Get a list of files with their filelength, bitrate and track length in seconds"""
list = {}
count = 0
lastpercent = 0.0
for folder in mtimes:
try:
count += 1
if progress:
# Truncate the percentage to two decimal places to avoid sending data to the GUI thread too often
percent = float("%.2f" % (float(count) / len(mtimes) * 0.75))
if percent > lastpercent and percent <= 1.0:
GLib.idle_add(progress.set_fraction, percent)
lastpercent = percent
if self.hiddenCheck({"dir": folder}):
continue
if not rebuild and folder in oldmtimes:
if mtimes[folder] == oldmtimes[folder]:
if os.path.exists(folder):
try:
virtualdir = self.real2virtual(folder)
list[virtualdir] = oldlist[virtualdir]
continue
except KeyError:
log.adddebug(
_(
"Inconsistent cache for '%(vdir)s', rebuilding '%(dir)s'"
)
% {"vdir": virtualdir, "dir": folder}
)
else:
log.adddebug(
_("Dropping missing directory %(dir)s") % {"dir": folder}
)
continue
virtualdir = self.real2virtual(folder)
list[virtualdir] = []
for entry in os.scandir(folder):
if entry.is_file():
filename = entry.name
if self.hiddenCheck({"dir": folder, "file": filename}):
continue
# Get the metadata of the file via mutagen
data = self.getFileInfo(filename, entry.path)
if data is not None:
list[virtualdir].append(data)
if yieldcall is not None:
yieldcall()
except OSError as errtuple:
message = _("Scanning Directory Error: %(error)s Path: %(path)s") % {
"error": errtuple,
"path": folder,
}
print(str(message))
self.logMessage(message)
continue
return list
|
def getFilesList(
self, mtimes, oldmtimes, oldlist, yieldcall=None, progress=None, rebuild=False
):
"""Get a list of files with their filelength, bitrate and track length in seconds"""
list = {}
count = 0
for folder in mtimes:
folder = os.path.expanduser(folder)
virtualdir = self.real2virtual(folder)
count += 1
if progress:
percent = float(count) / len(mtimes)
if percent <= 1.0:
GLib.idle_add(progress.set_fraction, percent)
if self.hiddenCheck({"dir": folder}):
continue
if not rebuild and folder in oldmtimes:
if mtimes[folder] == oldmtimes[folder]:
if os.path.exists(folder):
try:
list[virtualdir] = oldlist[virtualdir]
continue
except KeyError:
log.addwarning(
_("Inconsistent cache for '%(vdir)s', rebuilding '%(dir)s'")
% {"vdir": virtualdir, "dir": folder}
)
else:
log.adddebug(
_("Dropping missing directory %(dir)s") % {"dir": folder}
)
continue
list[virtualdir] = []
try:
for entry in os.scandir(folder):
if entry.is_file():
filename = entry.path.split("/")[-1]
if self.hiddenCheck({"dir": folder, "file": filename}):
continue
path = os.path.join(folder, filename)
# Get the metadata of the file via mutagen
data = self.getFileInfo(filename, path)
if data is not None:
list[virtualdir].append(data)
if yieldcall is not None:
yieldcall()
except OSError as errtuple:
message = _("Scanning Directory Error: %(error)s Path: %(path)s") % {
"error": errtuple,
"path": folder,
}
print(str(message))
self.logMessage(message)
continue
return list
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def getFileInfo(self, name, pathname):
try:
size = os.stat(pathname).st_size
info = metadata.detect(pathname)
if info:
# Sometimes the duration (time) or the bitrate of the file is unknown
if info["time"] is None or info["bitrate"] is None:
fileinfo = (name, size, None, None)
else:
bitrateinfo = (int(info["bitrate"]), int(info["vbr"]))
fileinfo = (name, size, bitrateinfo, int(info["time"]))
else:
fileinfo = (name, size, None, None)
return fileinfo
except Exception as errtuple:
message = _("Scanning File Error: %(error)s Path: %(path)s") % {
"error": errtuple,
"path": pathname,
}
self.logMessage(message)
displayTraceback(sys.exc_info()[2])
|
def getFileInfo(self, name, pathname):
try:
size = os.path.getsize(pathname)
info = metadata.detect(pathname)
if info:
# Sometimes the duration (time) or the bitrate of the file is unknown
if info["time"] is None or info["bitrate"] is None:
fileinfo = (name, size, None, None)
else:
bitrateinfo = (int(info["bitrate"]), int(info["vbr"]))
fileinfo = (name, size, bitrateinfo, int(info["time"]))
else:
fileinfo = (name, size, None, None)
return fileinfo
except Exception as errtuple:
message = _("Scanning File Error: %(error)s Path: %(path)s") % {
"error": errtuple,
"path": pathname,
}
self.logMessage(message)
displayTraceback(sys.exc_info()[2])
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def getFilesStreams(
self, mtimes, oldmtimes, oldstreams, newsharedfiles, rebuild=False, yieldcall=None
):
streams = {}
for folder in mtimes:
virtualdir = self.real2virtual(folder)
if self.hiddenCheck({"dir": folder}):
continue
if not rebuild and folder in oldmtimes:
if mtimes[folder] == oldmtimes[folder]:
if os.path.exists(folder):
# No change
try:
streams[virtualdir] = oldstreams[virtualdir]
continue
except KeyError:
log.addwarning(
_("Inconsistent cache for '%(vdir)s', rebuilding '%(dir)s'")
% {"vdir": virtualdir, "dir": folder}
)
else:
log.adddebug(
_("Dropping missing directory %(dir)s") % {"dir": folder}
)
continue
streams[virtualdir] = self.getDirStream(newsharedfiles[virtualdir])
if yieldcall is not None:
yieldcall()
return streams
|
def getFilesStreams(
self, mtimes, oldmtimes, oldstreams, newsharedfiles, rebuild=False, yieldcall=None
):
streams = {}
for directory in list(mtimes.keys()):
virtualdir = self.real2virtual(directory)
if self.hiddenCheck({"dir": directory}):
continue
if not rebuild and directory in oldmtimes:
if mtimes[directory] == oldmtimes[directory]:
if os.path.exists(directory):
# No change
try:
streams[virtualdir] = oldstreams[virtualdir]
continue
except KeyError:
log.addwarning(
_("Inconsistent cache for '%(vdir)s', rebuilding '%(dir)s'")
% {"vdir": virtualdir, "dir": directory}
)
else:
log.adddebug(
_("Dropping missing directory %(dir)s") % {"dir": directory}
)
continue
streams[virtualdir] = self.getDirStream(newsharedfiles[virtualdir])
if yieldcall is not None:
yieldcall()
return streams
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def getDirStream(self, dir):
stream = bytearray()
stream.extend(slskmessages.SlskMessage().packObject(NetworkIntType(len(dir))))
for file_and_meta in dir:
stream.extend(self.getByteStream(file_and_meta))
return stream
|
def getDirStream(self, dir):
msg = slskmessages.SlskMessage()
stream = msg.packObject(NetworkIntType(len(dir)))
for file_and_meta in dir:
stream += self.getByteStream(file_and_meta)
return stream
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def getByteStream(self, fileinfo):
message = slskmessages.SlskMessage()
stream = bytearray()
stream.extend(
bytes([1])
+ message.packObject(fileinfo[0])
+ message.packObject(NetworkLongLongType(fileinfo[1]))
)
if fileinfo[2] is not None:
try:
msgbytes = bytearray()
msgbytes.extend(message.packObject("mp3") + message.packObject(3))
msgbytes.extend(
message.packObject(0)
+ message.packObject(NetworkIntType(fileinfo[2][0]))
+ message.packObject(1)
+ message.packObject(NetworkIntType(fileinfo[3]))
+ message.packObject(2)
+ message.packObject(NetworkIntType(fileinfo[2][1]))
)
stream.extend(msgbytes)
except Exception:
log.addwarning(
_(
"Found meta data that couldn't be encoded, possible corrupt file: '%(file)s' has a bitrate of %(bitrate)s kbs, a length of %(length)s seconds and a VBR of %(vbr)s"
% {
"file": fileinfo[0],
"bitrate": fileinfo[2][0],
"length": fileinfo[3],
"vbr": fileinfo[2][1],
}
)
)
stream.extend(message.packObject("") + message.packObject(0))
else:
stream.extend(message.packObject("") + message.packObject(0))
return stream
|
def getByteStream(self, fileinfo):
message = slskmessages.SlskMessage()
stream = (
bytes([1])
+ message.packObject(fileinfo[0])
+ message.packObject(NetworkLongLongType(fileinfo[1]))
)
if fileinfo[2] is not None:
try:
msgbytes = b""
msgbytes += message.packObject("mp3") + message.packObject(3)
msgbytes += (
message.packObject(0)
+ message.packObject(NetworkIntType(fileinfo[2][0]))
+ message.packObject(1)
+ message.packObject(NetworkIntType(fileinfo[3]))
+ message.packObject(2)
+ message.packObject(NetworkIntType(fileinfo[2][1]))
)
stream += msgbytes
except Exception:
log.addwarning(
_(
"Found meta data that couldn't be encoded, possible corrupt file: '%(file)s' has a bitrate of %(bitrate)s kbs, a length of %(length)s seconds and a VBR of %(vbr)s"
% {
"file": fileinfo[0],
"bitrate": fileinfo[2][0],
"length": fileinfo[3],
"vbr": fileinfo[2][1],
}
)
)
stream += message.packObject("") + message.packObject(0)
else:
stream += message.packObject("") + message.packObject(0)
return stream
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def getFilesIndex(
self, mtimes, oldmtimes, newsharedfiles, yieldcall=None, progress=None
):
wordindex = defaultdict(list)
fileindex = []
index = 0
count = len(mtimes)
lastpercent = 0.0
for directory in mtimes:
virtualdir = self.real2virtual(directory)
count += 1
if progress:
# Truncate the percentage to two decimal places to avoid sending data to the GUI thread too often
percent = float("%.2f" % (float(count) / len(mtimes) * 0.75))
if percent > lastpercent and percent <= 1.0:
GLib.idle_add(progress.set_fraction, percent)
lastpercent = percent
if self.hiddenCheck({"dir": directory}):
continue
for j in newsharedfiles[virtualdir]:
file = j[0]
fileindex.append((virtualdir + "\\" + file,) + j[1:])
# Collect words from filenames for Search index (prevent duplicates with set)
words = set(
(virtualdir + " " + file)
.translate(self.translatepunctuation)
.lower()
.split()
)
for k in words:
wordindex[k].append(index)
index += 1
if yieldcall is not None:
yieldcall()
return wordindex, fileindex
|
def getFilesIndex(
self, mtimes, oldmtimes, shareddirs, newsharedfiles, yieldcall=None, progress=None
):
wordindex = defaultdict(list)
fileindex = {}
index = 0
count = 0
for directory in mtimes:
virtualdir = self.real2virtual(directory)
if progress:
percent = float(count) / len(mtimes)
if percent <= 1.0:
GLib.idle_add(progress.set_fraction, percent)
count += 1
if self.hiddenCheck({"dir": directory}):
continue
for j in newsharedfiles[virtualdir]:
indexes = self.getIndexWords(virtualdir, j[0], shareddirs)
for k in indexes:
wordindex[k].append(index)
fileindex[str(index)] = ((virtualdir + "\\" + j[0]),) + j[1:]
index += 1
if yieldcall is not None:
yieldcall()
return wordindex, fileindex
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def addToShared(self, name):
"""Add a file to the normal shares database"""
config = self.config.sections
if not config["transfers"]["sharedownloaddir"]:
return
shared = config["transfers"]["sharedfiles"]
sharedstreams = config["transfers"]["sharedfilesstreams"]
wordindex = config["transfers"]["wordindex"]
fileindex = config["transfers"]["fileindex"]
shareddirs = [path for _name, path in config["transfers"]["shared"]]
shareddirs.append(config["transfers"]["downloaddir"])
sharedmtimes = config["transfers"]["sharedmtimes"]
dir = str(os.path.expanduser(os.path.dirname(name)))
vdir = self.real2virtual(dir)
file = str(os.path.basename(name))
shared[vdir] = shared.get(vdir, [])
if file not in [i[0] for i in shared[vdir]]:
fileinfo = self.getFileInfo(file, name)
shared[vdir] = shared[vdir] + [fileinfo]
sharedstreams[vdir] = self.getDirStream(shared[vdir])
words = self.getIndexWords(vdir, file, shareddirs)
self.addToIndex(wordindex, fileindex, words, vdir, fileinfo)
sharedmtimes[vdir] = os.path.getmtime(dir)
self.newnormalshares = True
if config["transfers"]["enablebuddyshares"]:
self.addToBuddyShared(name)
|
def addToShared(self, name):
"""Add a file to the normal shares database"""
config = self.config.sections
if not config["transfers"]["sharedownloaddir"]:
return
shared = config["transfers"]["sharedfiles"]
sharedstreams = config["transfers"]["sharedfilesstreams"]
wordindex = config["transfers"]["wordindex"]
fileindex = config["transfers"]["fileindex"]
shareddirs = [path for _name, path in config["transfers"]["shared"]]
shareddirs.append(config["transfers"]["downloaddir"])
sharedmtimes = config["transfers"]["sharedmtimes"]
dir = str(os.path.expanduser(os.path.dirname(name)))
vdir = self.real2virtual(dir)
file = str(os.path.basename(name))
shared[vdir] = shared.get(vdir, [])
if file not in [i[0] for i in shared[vdir]]:
fileinfo = self.getFileInfo(file, name)
shared[vdir] = shared[vdir] + [fileinfo]
sharedstreams[vdir] = self.getDirStream(shared[vdir])
words = self.getIndexWords(vdir, file, shareddirs)
self.addToIndex(wordindex, fileindex, words, vdir, fileinfo)
sharedmtimes[vdir] = os.path.getmtime(dir)
self.newnormalshares = True
if config["transfers"]["enablebuddyshares"]:
self.addToBuddyShared(name)
self.config.writeShares()
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def addToIndex(self, wordindex, fileindex, words, dir, fileinfo):
index = len(fileindex)
for i in words:
if i not in wordindex:
wordindex[i] = [index]
else:
wordindex[i] = wordindex[i] + [index]
fileindex.append((os.path.join(dir, fileinfo[0]),) + fileinfo[1:])
|
def addToIndex(self, wordindex, fileindex, words, dir, fileinfo):
index = len(list(fileindex.keys()))
for i in words:
if i not in wordindex:
wordindex[i] = [index]
else:
wordindex[i] = wordindex[i] + [index]
fileindex[str(index)] = (os.path.join(dir, fileinfo[0]),) + fileinfo[1:]
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/454
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/shelve.py", line 111, in __getitem__
value = self.cache[key]
KeyError: 'Klassiek\\Jean Sibelius\\Two Serenades for Violin and Orchestra, Op.69'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 2039, in _RescanFinished
self.np.shares.sendNumSharedFoldersFiles()
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 105, in sendNumSharedFoldersFiles
sharedfiles = sum([len(x) for x in list(conf["transfers"][shared_db].values())])
File "/usr/local/lib/python3.7/_collections_abc.py", line 762, in __iter__
yield self._mapping[key]
File "/usr/local/lib/python3.7/shelve.py", line 113, in __getitem__
f = BytesIO(self.dict[key.encode(self.keyencoding)])
File "/usr/local/lib/python3.7/shelve.py", line 70, in closed
raise ValueError('invalid operation on closed shelf')
ValueError: invalid operation on closed shelf
|
KeyError
|
def OnSelectUserResults(self, widget):
if len(self.selected_users) == 0:
return
selected_user = widget.get_parent().user
sel = self.ResultsList.get_selection()
fmodel = self.ResultsList.get_model()
sel.unselect_all()
iter = fmodel.get_iter_first()
SelectUserRowIter(fmodel, sel, 1, selected_user, iter)
self.select_results()
|
def OnSelectUserResults(self, widget):
if len(self.selected_users) == 0:
return
selected_user = widget.get_parent().user
sel = self.ResultsList.get_selection()
fmodel = self.ResultsList.get_model()
sel.unselect_all()
iter = self.resultsmodel.get_iter_first()
self.UserRowIter(fmodel, sel, selected_user, iter)
self.select_results()
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/360
|
Traceback (most recent call last):
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/frame.py", line 1297, in OnNetworkEvent
self.np.events[i.__class__](i)
File "/home/user/projects/nicotine-plus/pynicotine/pynicotine.py", line 1556, in QueueUpload
self.transfers.QueueUpload(msg)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 700, in QueueUpload
self._updateOrAppendUpload(user, msg.file, newupload)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 614, in _updateOrAppendUpload
self.uploadspanel.replace(i, transferobj)
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/transferlist.py", line 293, in replace
for i in self.transfers:
AttributeError: 'Uploads' object has no attribute 'transfers'
Traceback (most recent call last):
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/frame.py", line 1297, in OnNetworkEvent
self.np.events[i.__class__](i)
File "/home/user/projects/nicotine-plus/pynicotine/pynicotine.py", line 1556, in QueueUpload
self.transfers.QueueUpload(msg)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 700, in QueueUpload
self._updateOrAppendUpload(user, msg.file, newupload)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 614, in _updateOrAppendUpload
self.uploadspanel.replace(i, transferobj)
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/transferlist.py", line 293, in replace
for i in self.transfers:
AttributeError: 'Uploads' object has no attribute 'transfers'
|
AttributeError
|
def transferFile(
self,
direction,
user,
filename,
path="",
transfer=None,
size=None,
bitrate=None,
length=None,
realfilename=None,
):
"""Get a single file. path is a local path. if transfer object is
not None, update it, otherwise create a new one."""
if transfer is None:
transfer = Transfer(
user=user,
filename=filename,
path=path,
status="Getting status",
size=size,
bitrate=bitrate,
length=length,
)
if direction == 0:
self.downloads.append(transfer)
else:
self._appendUpload(user, filename, transfer)
else:
transfer.status = "Getting status"
try:
status = self.users[user].status
except KeyError:
status = None
if (
not direction
and self.eventprocessor.config.sections["transfers"]["enablefilters"]
):
# Only filter downloads, never uploads!
try:
downloadregexp = re.compile(
self.eventprocessor.config.sections["transfers"]["downloadregexp"], re.I
)
if downloadregexp.search(filename) is not None:
self.eventprocessor.logMessage(_("Filtering: %s") % filename, 5)
self.AbortTransfer(transfer)
# The string to be displayed on the GUI
transfer.status = "Filtered"
except Exception:
pass
if status is None:
if user not in self.eventprocessor.watchedusers:
self.queue.put(slskmessages.AddUser(user))
self.queue.put(slskmessages.GetUserStatus(user))
if transfer.status != "Filtered":
transfer.req = newId()
realpath = self.eventprocessor.shares.virtual2real(filename)
request = slskmessages.TransferRequest(
None,
direction,
transfer.req,
filename,
self.getFileSize(realpath),
realpath,
)
self.eventprocessor.ProcessRequestToPeer(user, request)
if direction == 0:
self.downloadspanel.update(transfer)
else:
self.uploadspanel.update(transfer)
|
def transferFile(
self,
direction,
user,
filename,
path="",
transfer=None,
size=None,
bitrate=None,
length=None,
realfilename=None,
):
"""Get a single file. path is a local path. if transfer object is
not None, update it, otherwise create a new one."""
if transfer is None:
transfer = Transfer(
user=user,
filename=filename,
path=path,
status="Getting status",
size=size,
bitrate=bitrate,
length=length,
)
if direction == 0:
self.downloads.append(transfer)
else:
self._updateOrAppendUpload(user, filename, transfer)
else:
transfer.status = "Getting status"
try:
status = self.users[user].status
except KeyError:
status = None
if (
not direction
and self.eventprocessor.config.sections["transfers"]["enablefilters"]
):
# Only filter downloads, never uploads!
try:
downloadregexp = re.compile(
self.eventprocessor.config.sections["transfers"]["downloadregexp"], re.I
)
if downloadregexp.search(filename) is not None:
self.eventprocessor.logMessage(_("Filtering: %s") % filename, 5)
self.AbortTransfer(transfer)
# The string to be displayed on the GUI
transfer.status = "Filtered"
except Exception:
pass
if status is None:
if user not in self.eventprocessor.watchedusers:
self.queue.put(slskmessages.AddUser(user))
self.queue.put(slskmessages.GetUserStatus(user))
if transfer.status != "Filtered":
transfer.req = newId()
realpath = self.eventprocessor.shares.virtual2real(filename)
request = slskmessages.TransferRequest(
None,
direction,
transfer.req,
filename,
self.getFileSize(realpath),
realpath,
)
self.eventprocessor.ProcessRequestToPeer(user, request)
if direction == 0:
self.downloadspanel.update(transfer)
else:
self.uploadspanel.update(transfer)
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/360
|
Traceback (most recent call last):
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/frame.py", line 1297, in OnNetworkEvent
self.np.events[i.__class__](i)
File "/home/user/projects/nicotine-plus/pynicotine/pynicotine.py", line 1556, in QueueUpload
self.transfers.QueueUpload(msg)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 700, in QueueUpload
self._updateOrAppendUpload(user, msg.file, newupload)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 614, in _updateOrAppendUpload
self.uploadspanel.replace(i, transferobj)
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/transferlist.py", line 293, in replace
for i in self.transfers:
AttributeError: 'Uploads' object has no attribute 'transfers'
Traceback (most recent call last):
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/frame.py", line 1297, in OnNetworkEvent
self.np.events[i.__class__](i)
File "/home/user/projects/nicotine-plus/pynicotine/pynicotine.py", line 1556, in QueueUpload
self.transfers.QueueUpload(msg)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 700, in QueueUpload
self._updateOrAppendUpload(user, msg.file, newupload)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 614, in _updateOrAppendUpload
self.uploadspanel.replace(i, transferobj)
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/transferlist.py", line 293, in replace
for i in self.transfers:
AttributeError: 'Uploads' object has no attribute 'transfers'
|
AttributeError
|
def _TransferRequestUploads(self, msg, user, conn, addr):
# Is user alllowed to download?
checkuser, reason = self.eventprocessor.CheckUser(user, addr)
if not checkuser:
return slskmessages.TransferResponse(conn, 0, reason=reason, req=msg.req)
# Do we actually share that file with the world?
realpath = self.eventprocessor.shares.virtual2real(msg.file)
if not self.fileIsShared(user, msg.file, realpath):
return slskmessages.TransferResponse(
conn, 0, reason="File not shared", req=msg.req
)
# Is that file already in the queue?
if self.fileIsUploadQueued(user, msg.file):
return slskmessages.TransferResponse(conn, 0, reason="Queued", req=msg.req)
# Has user hit queue limit?
friend = user in [i[0] for i in self.eventprocessor.userlist.userlist]
if friend and self.eventprocessor.config.sections["transfers"]["friendsnolimits"]:
limits = False
else:
limits = True
if limits and self.queueLimitReached(user):
uploadslimit = self.eventprocessor.config.sections["transfers"]["queuelimit"]
return slskmessages.TransferResponse(
conn,
0,
reason="User limit of %i megabytes exceeded" % (uploadslimit),
req=msg.req,
)
if limits and self.fileLimitReached(user):
filelimit = self.eventprocessor.config.sections["transfers"]["filelimit"]
limitmsg = "User limit of %i files exceeded" % (filelimit)
return slskmessages.TransferResponse(conn, 0, reason=limitmsg, req=msg.req)
# All checks passed, user can queue file!
self.eventprocessor.frame.pluginhandler.UploadQueuedNotification(
user, msg.file, realpath
)
# Is user already downloading/negotiating a download?
if not self.allowNewUploads() or user in self.getTransferringUsers():
response = slskmessages.TransferResponse(conn, 0, reason="Queued", req=msg.req)
newupload = Transfer(
user=user,
filename=msg.file,
realfilename=realpath,
path=os.path.dirname(realpath),
status="Queued",
timequeued=time.time(),
size=self.getFileSize(realpath),
place=len(self.uploads),
)
self._appendUpload(user, msg.file, newupload)
self.uploadspanel.update(newupload)
self.addQueued(user, realpath)
return response
# All checks passed, starting a new upload.
size = self.getFileSize(realpath)
response = slskmessages.TransferResponse(conn, 1, req=msg.req, filesize=size)
transfertimeout = TransferTimeout(msg.req, self.eventprocessor.frame.callback)
transferobj = Transfer(
user=user,
realfilename=realpath,
filename=realpath,
path=os.path.dirname(realpath),
status="Waiting for upload",
req=msg.req,
size=size,
place=len(self.uploads),
)
self._appendUpload(user, msg.file, transferobj)
transferobj.transfertimer = threading.Timer(30.0, transfertimeout.timeout)
transferobj.transfertimer.setDaemon(True)
transferobj.transfertimer.start()
self.uploadspanel.update(transferobj)
return response
|
def _TransferRequestUploads(self, msg, user, conn, addr):
# Is user alllowed to download?
checkuser, reason = self.eventprocessor.CheckUser(user, addr)
if not checkuser:
return slskmessages.TransferResponse(conn, 0, reason=reason, req=msg.req)
# Do we actually share that file with the world?
realpath = self.eventprocessor.shares.virtual2real(msg.file)
if not self.fileIsShared(user, msg.file, realpath):
return slskmessages.TransferResponse(
conn, 0, reason="File not shared", req=msg.req
)
# Is that file already in the queue?
if self.fileIsUploadQueued(user, msg.file):
return slskmessages.TransferResponse(conn, 0, reason="Queued", req=msg.req)
# Has user hit queue limit?
friend = user in [i[0] for i in self.eventprocessor.userlist.userlist]
if friend and self.eventprocessor.config.sections["transfers"]["friendsnolimits"]:
limits = False
else:
limits = True
if limits and self.queueLimitReached(user):
uploadslimit = self.eventprocessor.config.sections["transfers"]["queuelimit"]
return slskmessages.TransferResponse(
conn,
0,
reason="User limit of %i megabytes exceeded" % (uploadslimit),
req=msg.req,
)
if limits and self.fileLimitReached(user):
filelimit = self.eventprocessor.config.sections["transfers"]["filelimit"]
limitmsg = "User limit of %i files exceeded" % (filelimit)
return slskmessages.TransferResponse(conn, 0, reason=limitmsg, req=msg.req)
# All checks passed, user can queue file!
self.eventprocessor.frame.pluginhandler.UploadQueuedNotification(
user, msg.file, realpath
)
# Is user already downloading/negotiating a download?
if not self.allowNewUploads() or user in self.getTransferringUsers():
response = slskmessages.TransferResponse(conn, 0, reason="Queued", req=msg.req)
newupload = Transfer(
user=user,
filename=msg.file,
realfilename=realpath,
path=os.path.dirname(realpath),
status="Queued",
timequeued=time.time(),
size=self.getFileSize(realpath),
place=len(self.uploads),
)
self._updateOrAppendUpload(user, msg.file, newupload)
self.uploadspanel.update(newupload)
self.addQueued(user, realpath)
return response
# All checks passed, starting a new upload.
size = self.getFileSize(realpath)
response = slskmessages.TransferResponse(conn, 1, req=msg.req, filesize=size)
transfertimeout = TransferTimeout(msg.req, self.eventprocessor.frame.callback)
transferobj = Transfer(
user=user,
realfilename=realpath,
filename=realpath,
path=os.path.dirname(realpath),
status="Waiting for upload",
req=msg.req,
size=size,
place=len(self.uploads),
)
self._updateOrAppendUpload(user, msg.file, transferobj)
transferobj.transfertimer = threading.Timer(30.0, transfertimeout.timeout)
transferobj.transfertimer.setDaemon(True)
transferobj.transfertimer.start()
self.uploadspanel.update(transferobj)
return response
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/360
|
Traceback (most recent call last):
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/frame.py", line 1297, in OnNetworkEvent
self.np.events[i.__class__](i)
File "/home/user/projects/nicotine-plus/pynicotine/pynicotine.py", line 1556, in QueueUpload
self.transfers.QueueUpload(msg)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 700, in QueueUpload
self._updateOrAppendUpload(user, msg.file, newupload)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 614, in _updateOrAppendUpload
self.uploadspanel.replace(i, transferobj)
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/transferlist.py", line 293, in replace
for i in self.transfers:
AttributeError: 'Uploads' object has no attribute 'transfers'
Traceback (most recent call last):
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/frame.py", line 1297, in OnNetworkEvent
self.np.events[i.__class__](i)
File "/home/user/projects/nicotine-plus/pynicotine/pynicotine.py", line 1556, in QueueUpload
self.transfers.QueueUpload(msg)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 700, in QueueUpload
self._updateOrAppendUpload(user, msg.file, newupload)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 614, in _updateOrAppendUpload
self.uploadspanel.replace(i, transferobj)
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/transferlist.py", line 293, in replace
for i in self.transfers:
AttributeError: 'Uploads' object has no attribute 'transfers'
|
AttributeError
|
def QueueUpload(self, msg):
"""Peer remotely(?) queued a download (upload here)"""
user = None
for i in self.peerconns:
if i.conn is msg.conn.conn:
user = i.username
if user is None:
return
addr = msg.conn.addr[0]
realpath = self.eventprocessor.shares.virtual2real(msg.file)
if not self.fileIsUploadQueued(user, msg.file):
friend = user in [i[0] for i in self.eventprocessor.userlist.userlist]
if (
friend
and self.eventprocessor.config.sections["transfers"]["friendsnolimits"]
):
limits = 0
else:
limits = 1
checkuser, reason = self.eventprocessor.CheckUser(user, addr)
if not checkuser:
self.queue.put(
slskmessages.QueueFailed(
conn=msg.conn.conn, file=msg.file, reason=reason
)
)
elif limits and self.queueLimitReached(user):
uploadslimit = self.eventprocessor.config.sections["transfers"][
"queuelimit"
]
limitmsg = "User limit of %i megabytes exceeded" % (uploadslimit)
self.queue.put(
slskmessages.QueueFailed(
conn=msg.conn.conn, file=msg.file, reason=limitmsg
)
)
elif limits and self.fileLimitReached(user):
filelimit = self.eventprocessor.config.sections["transfers"]["filelimit"]
limitmsg = "User limit of %i files exceeded" % (filelimit)
self.queue.put(
slskmessages.QueueFailed(
conn=msg.conn.conn, file=msg.file, reason=limitmsg
)
)
elif self.fileIsShared(user, msg.file, realpath):
newupload = Transfer(
user=user,
filename=msg.file,
realfilename=realpath,
path=os.path.dirname(realpath),
status="Queued",
timequeued=time.time(),
size=self.getFileSize(realpath),
)
self._appendUpload(user, msg.file, newupload)
self.uploadspanel.update(newupload)
self.addQueued(user, msg.file)
self.eventprocessor.frame.pluginhandler.UploadQueuedNotification(
user, msg.file, realpath
)
else:
self.queue.put(
slskmessages.QueueFailed(
conn=msg.conn.conn, file=msg.file, reason="File not shared"
)
)
self.eventprocessor.logMessage(
_("Queued upload request: User %(user)s, %(msg)s")
% {"user": user, "msg": str(vars(msg))},
5,
)
self.checkUploadQueue()
|
def QueueUpload(self, msg):
"""Peer remotely(?) queued a download (upload here)"""
user = None
for i in self.peerconns:
if i.conn is msg.conn.conn:
user = i.username
if user is None:
return
addr = msg.conn.addr[0]
realpath = self.eventprocessor.shares.virtual2real(msg.file)
if not self.fileIsUploadQueued(user, msg.file):
friend = user in [i[0] for i in self.eventprocessor.userlist.userlist]
if (
friend
and self.eventprocessor.config.sections["transfers"]["friendsnolimits"]
):
limits = 0
else:
limits = 1
checkuser, reason = self.eventprocessor.CheckUser(user, addr)
if not checkuser:
self.queue.put(
slskmessages.QueueFailed(
conn=msg.conn.conn, file=msg.file, reason=reason
)
)
elif limits and self.queueLimitReached(user):
uploadslimit = self.eventprocessor.config.sections["transfers"][
"queuelimit"
]
limitmsg = "User limit of %i megabytes exceeded" % (uploadslimit)
self.queue.put(
slskmessages.QueueFailed(
conn=msg.conn.conn, file=msg.file, reason=limitmsg
)
)
elif limits and self.fileLimitReached(user):
filelimit = self.eventprocessor.config.sections["transfers"]["filelimit"]
limitmsg = "User limit of %i files exceeded" % (filelimit)
self.queue.put(
slskmessages.QueueFailed(
conn=msg.conn.conn, file=msg.file, reason=limitmsg
)
)
elif self.fileIsShared(user, msg.file, realpath):
newupload = Transfer(
user=user,
filename=msg.file,
realfilename=realpath,
path=os.path.dirname(realpath),
status="Queued",
timequeued=time.time(),
size=self.getFileSize(realpath),
)
self._updateOrAppendUpload(user, msg.file, newupload)
self.uploadspanel.update(newupload)
self.addQueued(user, msg.file)
self.eventprocessor.frame.pluginhandler.UploadQueuedNotification(
user, msg.file, realpath
)
else:
self.queue.put(
slskmessages.QueueFailed(
conn=msg.conn.conn, file=msg.file, reason="File not shared"
)
)
self.eventprocessor.logMessage(
_("Queued upload request: User %(user)s, %(msg)s")
% {"user": user, "msg": str(vars(msg))},
5,
)
self.checkUploadQueue()
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/360
|
Traceback (most recent call last):
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/frame.py", line 1297, in OnNetworkEvent
self.np.events[i.__class__](i)
File "/home/user/projects/nicotine-plus/pynicotine/pynicotine.py", line 1556, in QueueUpload
self.transfers.QueueUpload(msg)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 700, in QueueUpload
self._updateOrAppendUpload(user, msg.file, newupload)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 614, in _updateOrAppendUpload
self.uploadspanel.replace(i, transferobj)
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/transferlist.py", line 293, in replace
for i in self.transfers:
AttributeError: 'Uploads' object has no attribute 'transfers'
Traceback (most recent call last):
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/frame.py", line 1297, in OnNetworkEvent
self.np.events[i.__class__](i)
File "/home/user/projects/nicotine-plus/pynicotine/pynicotine.py", line 1556, in QueueUpload
self.transfers.QueueUpload(msg)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 700, in QueueUpload
self._updateOrAppendUpload(user, msg.file, newupload)
File "/home/user/projects/nicotine-plus/pynicotine/transfers.py", line 614, in _updateOrAppendUpload
self.uploadspanel.replace(i, transferobj)
File "/home/user/projects/nicotine-plus/pynicotine/gtkgui/transferlist.py", line 293, in replace
for i in self.transfers:
AttributeError: 'Uploads' object has no attribute 'transfers'
|
AttributeError
|
def readConfig(self):
self.config_lock.acquire()
self.sections["transfers"]["downloads"] = []
if exists(os.path.join(self.data_dir, "transfers.pickle")):
# <1.2.13 stored transfers inside the main config
try:
handle = open(os.path.join(self.data_dir, "transfers.pickle"), "rb")
except IOError as inst:
log.addwarning(
_("Something went wrong while opening your transfer list: %(error)s")
% {"error": str(inst)}
)
else:
try:
self.sections["transfers"]["downloads"] = pickle.load(handle)
except (IOError, EOFError, ValueError) as inst:
log.addwarning(
_(
"Something went wrong while reading your transfer list: %(error)s"
)
% {"error": str(inst)}
)
try:
handle.close()
except Exception:
pass
path, fn = os.path.split(self.filename)
try:
if not os.path.isdir(path):
os.makedirs(path)
except OSError as msg:
log.addwarning("Can't create directory '%s', reported error: %s" % (path, msg))
try:
if not os.path.isdir(self.data_dir):
os.makedirs(self.data_dir)
except OSError as msg:
log.addwarning("Can't create directory '%s', reported error: %s" % (path, msg))
# Transition from 1.2.16 -> 1.4.0
# Do the cleanup early so we don't get the annoying
# 'Unknown config option ...' message
self.removeOldOption("transfers", "pmqueueddir")
self.removeOldOption("server", "lastportstatuscheck")
self.removeOldOption("server", "serverlist")
self.removeOldOption("userinfo", "descrutf8")
self.removeOldOption("ui", "enabletrans")
self.removeOldOption("ui", "mozembed")
self.removeOldOption("ui", "open_in_mozembed")
self.removeOldOption("ui", "tooltips")
self.removeOldOption("ui", "transalpha")
self.removeOldOption("ui", "transfilter")
self.removeOldOption("ui", "transtint")
self.removeOldOption("language", "language")
self.removeOldOption("language", "setlanguage")
self.removeOldSection("language")
# Transition from 1.4.1 -> 1.4.2
self.removeOldOption("columns", "downloads")
self.removeOldOption("columns", "uploads")
# Remove old encoding settings (1.4.3)
self.removeOldOption("server", "enc")
self.removeOldOption("server", "fallbackencodings")
self.removeOldOption("server", "roomencoding")
self.removeOldOption("server", "userencoding")
# Remove soundcommand config, replaced by GSound (1.4.3)
self.removeOldOption("ui", "soundcommand")
# Checking for unknown section/options
unknown1 = [
"login",
"passw",
"enc",
"downloaddir",
"uploaddir",
"customban",
"descr",
"pic",
"logsdir",
"roomlogsdir",
"privatelogsdir",
"incompletedir",
"autoreply",
"afterfinish",
"downloadregexp",
"afterfolder",
"default",
"chatfont",
"npothercommand",
"npplayer",
"npformat",
"private_timestamp",
"rooms_timestamp",
"log_timestamp",
]
unknown2 = {
"ui": [
"roomlistcollapsed",
"tab_select_previous",
"tabclosers",
"tab_colors",
"tab_reorderable",
"buddylistinchatrooms",
"trayicon",
"showaway",
"usernamehotspots",
"exitdialog",
"tab_icons",
"spellcheck",
"modes_order",
"modes_visible",
"chat_hidebuttons",
"tab_status_icons",
"notexists",
"soundenabled",
"speechenabled",
"enablefilters",
"width",
"height",
"xposition",
"yposition",
"labelmain",
"labelrooms",
"labelprivate",
"labelinfo",
"labelbrowse",
"labelsearch",
],
"words": [
"completion",
"censorwords",
"replacewords",
"autoreplaced",
"censored",
"characters",
"tab",
"cycle",
"dropdown",
"roomnames",
"buddies",
"roomusers",
"commands",
"aliases",
"onematch",
],
}
for i in self.parser.sections():
for j in self.parser.options(i):
val = self.parser.get(i, j, raw=1)
if i not in self.sections:
log.addwarning(_("Unknown config section '%s'") % i)
elif j not in self.sections[i] and not (j == "filter" or i in ("plugins",)):
log.addwarning(
_("Unknown config option '%(option)s' in section '%(section)s'")
% {"option": j, "section": i}
)
elif j in unknown1 or (i in unknown2 and j not in unknown2[i]):
if val is not None and val != "None":
self.sections[i][j] = val
else:
self.sections[i][j] = None
else:
try:
self.sections[i][j] = eval(val, {})
except Exception:
self.sections[i][j] = None
log.addwarning(
"CONFIG ERROR: Couldn't decode '%s' section '%s' value '%s'"
% (str(j), str(i), str(val))
)
# Convert fs-based shared to virtual shared (pre 1.4.0)
def _convert_to_virtual(x):
if isinstance(x, tuple):
return x
virtual = x.replace("/", "_").replace("\\", "_").strip("_")
log.addwarning(
"Renaming shared folder '%s' to '%s'. A rescan of your share is required."
% (x, virtual)
)
return (virtual, x)
self.sections["transfers"]["shared"] = [
_convert_to_virtual(x) for x in self.sections["transfers"]["shared"]
]
self.sections["transfers"]["buddyshared"] = [
_convert_to_virtual(x) for x in self.sections["transfers"]["buddyshared"]
]
sharedfiles = None
bsharedfiles = None
sharedfilesstreams = None
bsharedfilesstreams = None
wordindex = None
bwordindex = None
fileindex = None
bfileindex = None
sharedmtimes = None
bsharedmtimes = None
shelves = [
os.path.join(self.data_dir, "files.db"),
os.path.join(self.data_dir, "buddyfiles.db"),
os.path.join(self.data_dir, "streams.db"),
os.path.join(self.data_dir, "buddystreams.db"),
os.path.join(self.data_dir, "wordindex.db"),
os.path.join(self.data_dir, "buddywordindex.db"),
os.path.join(self.data_dir, "fileindex.db"),
os.path.join(self.data_dir, "buddyfileindex.db"),
os.path.join(self.data_dir, "mtimes.db"),
os.path.join(self.data_dir, "buddymtimes.db"),
]
_opened_shelves = []
_errors = []
for shelvefile in shelves:
try:
_opened_shelves.append(
shelve.open(shelvefile, protocol=pickle.HIGHEST_PROTOCOL)
)
except Exception:
_errors.append(shelvefile)
try:
os.unlink(shelvefile)
_opened_shelves.append(
shelve.open(shelvefile, flag="n", protocol=pickle.HIGHEST_PROTOCOL)
)
except Exception as ex:
print(("Failed to unlink %s: %s" % (shelvefile, ex)))
sharedfiles = _opened_shelves.pop(0)
bsharedfiles = _opened_shelves.pop(0)
sharedfilesstreams = _opened_shelves.pop(0)
bsharedfilesstreams = _opened_shelves.pop(0)
wordindex = _opened_shelves.pop(0)
bwordindex = _opened_shelves.pop(0)
fileindex = _opened_shelves.pop(0)
bfileindex = _opened_shelves.pop(0)
sharedmtimes = _opened_shelves.pop(0)
bsharedmtimes = _opened_shelves.pop(0)
if _errors:
log.addwarning(
_("Failed to process the following databases: %(names)s")
% {"names": "\n".join(_errors)}
)
files = self.clearShares(
sharedfiles,
bsharedfiles,
sharedfilesstreams,
bsharedfilesstreams,
wordindex,
bwordindex,
fileindex,
bfileindex,
sharedmtimes,
bsharedmtimes,
)
if files is not None:
(
sharedfiles,
bsharedfiles,
sharedfilesstreams,
bsharedfilesstreams,
wordindex,
bwordindex,
fileindex,
bfileindex,
sharedmtimes,
bsharedmtimes,
) = files
log.addwarning(
_("Shared files database seems to be corrupted, rescan your shares")
)
self.sections["transfers"]["sharedfiles"] = sharedfiles
self.sections["transfers"]["sharedfilesstreams"] = sharedfilesstreams
self.sections["transfers"]["wordindex"] = wordindex
self.sections["transfers"]["fileindex"] = fileindex
self.sections["transfers"]["sharedmtimes"] = sharedmtimes
self.sections["transfers"]["bsharedfiles"] = bsharedfiles
self.sections["transfers"]["bsharedfilesstreams"] = bsharedfilesstreams
self.sections["transfers"]["bwordindex"] = bwordindex
self.sections["transfers"]["bfileindex"] = bfileindex
self.sections["transfers"]["bsharedmtimes"] = bsharedmtimes
# Setting the port range in numerical order
self.sections["server"]["portrange"] = (
min(self.sections["server"]["portrange"]),
max(self.sections["server"]["portrange"]),
)
self.config_lock.release()
|
def readConfig(self):
self.config_lock.acquire()
self.sections["transfers"]["downloads"] = []
if exists(os.path.join(self.data_dir, "transfers.pickle")):
# <1.2.13 stored transfers inside the main config
try:
handle = open(os.path.join(self.data_dir, "transfers.pickle"), "rb")
except IOError as inst:
log.addwarning(
_("Something went wrong while opening your transfer list: %(error)s")
% {"error": str(inst)}
)
else:
try:
self.sections["transfers"]["downloads"] = pickle.load(handle)
except (IOError, EOFError, ValueError) as inst:
log.addwarning(
_(
"Something went wrong while reading your transfer list: %(error)s"
)
% {"error": str(inst)}
)
try:
handle.close()
except Exception:
pass
path, fn = os.path.split(self.filename)
try:
if not os.path.isdir(path):
os.makedirs(path)
except OSError as msg:
log.addwarning("Can't create directory '%s', reported error: %s" % (path, msg))
try:
if not os.path.isdir(self.data_dir):
os.makedirs(self.data_dir)
except OSError as msg:
log.addwarning("Can't create directory '%s', reported error: %s" % (path, msg))
# Transition from 1.2.16 -> 1.4.0
# Do the cleanup early so we don't get the annoying
# 'Unknown config option ...' message
self.removeOldOption("transfers", "pmqueueddir")
self.removeOldOption("server", "lastportstatuscheck")
self.removeOldOption("server", "serverlist")
self.removeOldOption("userinfo", "descrutf8")
self.removeOldOption("ui", "enabletrans")
self.removeOldOption("ui", "mozembed")
self.removeOldOption("ui", "open_in_mozembed")
self.removeOldOption("ui", "tooltips")
self.removeOldOption("ui", "transalpha")
self.removeOldOption("ui", "transfilter")
self.removeOldOption("ui", "transtint")
self.removeOldOption("language", "language")
self.removeOldOption("language", "setlanguage")
self.removeOldSection("language")
# Transition from 1.4.1 -> 1.4.2
self.removeOldOption("columns", "downloads")
self.removeOldOption("columns", "uploads")
# Remove old encoding settings (1.4.3)
self.removeOldOption("server", "enc")
self.removeOldOption("server", "fallbackencodings")
self.removeOldOption("server", "roomencoding")
self.removeOldOption("server", "userencoding")
# Remove soundcommand config, replaced by GSound (1.4.3)
self.removeOldOption("ui", "soundcommand")
# Checking for unknown section/options
unknown1 = [
"login",
"passw",
"enc",
"downloaddir",
"uploaddir",
"customban",
"descr",
"pic",
"logsdir",
"roomlogsdir",
"privatelogsdir",
"incompletedir",
"autoreply",
"afterfinish",
"downloadregexp",
"afterfolder",
"default",
"chatfont",
"npothercommand",
"npplayer",
"npformat",
"private_timestamp",
"rooms_timestamp",
"log_timestamp",
]
unknown2 = {
"ui": [
"roomlistcollapsed",
"tab_select_previous",
"tabclosers",
"tab_colors",
"tab_reorderable",
"buddylistinchatrooms",
"trayicon",
"showaway",
"usernamehotspots",
"exitdialog",
"tab_icons",
"spellcheck",
"modes_order",
"modes_visible",
"chat_hidebuttons",
"tab_status_icons",
"notexists",
"soundenabled",
"speechenabled",
"enablefilters",
"width",
"height",
"xposition",
"yposition",
"labelmain",
"labelrooms",
"labelprivate",
"labelinfo",
"labelbrowse",
"labelsearch",
],
"words": [
"completion",
"censorwords",
"replacewords",
"autoreplaced",
"censored",
"characters",
"tab",
"cycle",
"dropdown",
"roomnames",
"buddies",
"roomusers",
"commands",
"aliases",
"onematch",
],
}
for i in self.parser.sections():
for j in self.parser.options(i):
val = self.parser.get(i, j, raw=1)
if i not in self.sections:
log.addwarning(_("Unknown config section '%s'") % i)
elif j not in self.sections[i] and not (j == "filter" or i in ("plugins",)):
log.addwarning(
_("Unknown config option '%(option)s' in section '%(section)s'")
% {"option": j, "section": i}
)
elif j in unknown1 or (i in unknown2 and j not in unknown2[i]):
if val is not None and val != "None":
self.sections[i][j] = val
else:
self.sections[i][j] = None
else:
try:
self.sections[i][j] = eval(val, {})
except Exception:
self.sections[i][j] = None
log.addwarning(
"CONFIG ERROR: Couldn't decode '%s' section '%s' value '%s'"
% (str(j), str(i), str(val))
)
# Convert fs-based shared to virtual shared (pre 1.4.0)
def _convert_to_virtual(x):
if isinstance(x, tuple):
return x
virtual = x.replace("/", "_").replace("\\", "_").strip("_")
log.addwarning(
"Renaming shared folder '%s' to '%s'. A rescan of your share is required."
% (x, virtual)
)
return (virtual, x)
self.sections["transfers"]["shared"] = [
_convert_to_virtual(x) for x in self.sections["transfers"]["shared"]
]
self.sections["transfers"]["buddyshared"] = [
_convert_to_virtual(x) for x in self.sections["transfers"]["buddyshared"]
]
sharedfiles = None
bsharedfiles = None
sharedfilesstreams = None
bsharedfilesstreams = None
wordindex = None
bwordindex = None
fileindex = None
bfileindex = None
sharedmtimes = None
bsharedmtimes = None
shelves = [
os.path.join(self.data_dir, "files.db"),
os.path.join(self.data_dir, "buddyfiles.db"),
os.path.join(self.data_dir, "streams.db"),
os.path.join(self.data_dir, "buddystreams.db"),
os.path.join(self.data_dir, "wordindex.db"),
os.path.join(self.data_dir, "buddywordindex.db"),
os.path.join(self.data_dir, "fileindex.db"),
os.path.join(self.data_dir, "buddyfileindex.db"),
os.path.join(self.data_dir, "mtimes.db"),
os.path.join(self.data_dir, "buddymtimes.db"),
]
_opened_shelves = []
_errors = []
for shelvefile in shelves:
try:
_opened_shelves.append(shelve.open(shelvefile))
except Exception:
_errors.append(shelvefile)
try:
os.unlink(shelvefile)
_opened_shelves.append(shelve.open(shelvefile, flag="n"))
except Exception as ex:
print(("Failed to unlink %s: %s" % (shelvefile, ex)))
sharedfiles = _opened_shelves.pop(0)
bsharedfiles = _opened_shelves.pop(0)
sharedfilesstreams = _opened_shelves.pop(0)
bsharedfilesstreams = _opened_shelves.pop(0)
wordindex = _opened_shelves.pop(0)
bwordindex = _opened_shelves.pop(0)
fileindex = _opened_shelves.pop(0)
bfileindex = _opened_shelves.pop(0)
sharedmtimes = _opened_shelves.pop(0)
bsharedmtimes = _opened_shelves.pop(0)
if _errors:
log.addwarning(
_("Failed to process the following databases: %(names)s")
% {"names": "\n".join(_errors)}
)
files = self.clearShares(
sharedfiles,
bsharedfiles,
sharedfilesstreams,
bsharedfilesstreams,
wordindex,
bwordindex,
fileindex,
bfileindex,
sharedmtimes,
bsharedmtimes,
)
if files is not None:
(
sharedfiles,
bsharedfiles,
sharedfilesstreams,
bsharedfilesstreams,
wordindex,
bwordindex,
fileindex,
bfileindex,
sharedmtimes,
bsharedmtimes,
) = files
log.addwarning(
_("Shared files database seems to be corrupted, rescan your shares")
)
self.sections["transfers"]["sharedfiles"] = sharedfiles
self.sections["transfers"]["sharedfilesstreams"] = sharedfilesstreams
self.sections["transfers"]["wordindex"] = wordindex
self.sections["transfers"]["fileindex"] = fileindex
self.sections["transfers"]["sharedmtimes"] = sharedmtimes
self.sections["transfers"]["bsharedfiles"] = bsharedfiles
self.sections["transfers"]["bsharedfilesstreams"] = bsharedfilesstreams
self.sections["transfers"]["bwordindex"] = bwordindex
self.sections["transfers"]["bfileindex"] = bfileindex
self.sections["transfers"]["bsharedmtimes"] = bsharedmtimes
# Setting the port range in numerical order
self.sections["server"]["portrange"] = (
min(self.sections["server"]["portrange"]),
max(self.sections["server"]["portrange"]),
)
self.config_lock.release()
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/267
|
Thu 07:58 Nicotine+ supports a country code blocker. This requires a (GPL'ed) library called GeoIP. You can find it here: C library: https://github.com/maxmind/geoip-api-c Python
bindings: https://github.com/maxmind/geoip-api-python (the python bindings require the C library)
Traceback (most recent call last):
File "/usr/local/bin/nicotine", line 255, in <module>
run()
File "/usr/local/bin/nicotine", line 231, in run
app = frame.MainApp(data_dir, config, plugins, trayicon, hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 3595, in __init__
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 273, in __init__
config
File "/usr/local/lib/python3.7/site-packages/pynicotine/pynicotine.py", line 130, in __init__
self.shares = Shares(self)
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 75, in __init__
self.CompressShares("normal")
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 201, in CompressShares
m.makeNetworkMessage(nozlib=0, rebuild=True)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1709, in makeNetworkMessage
msg = msg + self.packObject(len(list(self.list.keys())))
File "/usr/local/lib/python3.7/_collections_abc.py", line 720, in __iter__
yield from self._mapping
File "/usr/local/lib/python3.7/shelve.py", line 95, in __iter__
for k in self.dict.keys():
SystemError: Negative size passed to PyBytes_FromStringAndSize
|
SystemError
|
def clearShares(
self,
sharedfiles,
bsharedfiles,
sharedfilesstreams,
bsharedfilesstreams,
wordindex,
bwordindex,
fileindex,
bfileindex,
sharedmtimes,
bsharedmtimes,
):
try:
if sharedfiles:
sharedfiles.close()
try:
os.unlink(os.path.join(self.data_dir, "files.db"))
except Exception:
pass
sharedfiles = shelve.open(
os.path.join(self.data_dir, "files.db"),
flag="n",
protocol=pickle.HIGHEST_PROTOCOL,
)
if bsharedfiles:
bsharedfiles.close()
try:
os.unlink(os.path.join(self.data_dir, "buddyfiles.db"))
except Exception:
pass
bsharedfiles = shelve.open(
os.path.join(self.data_dir, "buddyfiles.db"),
flag="n",
protocol=pickle.HIGHEST_PROTOCOL,
)
if sharedfilesstreams:
sharedfilesstreams.close()
try:
os.unlink(os.path.join(self.data_dir, "streams.db"))
except Exception:
pass
sharedfilesstreams = shelve.open(
os.path.join(self.data_dir, "streams.db"),
flag="n",
protocol=pickle.HIGHEST_PROTOCOL,
)
if bsharedfilesstreams:
bsharedfilesstreams.close()
try:
os.unlink(os.path.join(self.data_dir, "buddystreams.db"))
except Exception:
pass
bsharedfilesstreams = shelve.open(
os.path.join(self.data_dir, "buddystreams.db"),
flag="n",
protocol=pickle.HIGHEST_PROTOCOL,
)
if wordindex:
wordindex.close()
try:
os.unlink(os.path.join(self.data_dir, "wordindex.db"))
except Exception:
pass
wordindex = shelve.open(
os.path.join(self.data_dir, "wordindex.db"),
flag="n",
protocol=pickle.HIGHEST_PROTOCOL,
)
if bwordindex:
bwordindex.close()
try:
os.unlink(os.path.join(self.data_dir, "buddywordindex.db"))
except Exception:
pass
bwordindex = shelve.open(
os.path.join(self.data_dir, "buddywordindex.db"),
flag="n",
protocol=pickle.HIGHEST_PROTOCOL,
)
if fileindex:
fileindex.close()
try:
os.unlink(os.path.join(self.data_dir, "fileindex.db"))
except Exception:
pass
fileindex = shelve.open(
os.path.join(self.data_dir, "fileindex.db"),
flag="n",
protocol=pickle.HIGHEST_PROTOCOL,
)
if bfileindex:
bfileindex.close()
try:
os.unlink(os.path.join(self.data_dir, "buddyfileindex.db"))
except Exception:
pass
bfileindex = shelve.open(
os.path.join(self.data_dir, "buddyfileindex.db"),
flag="n",
protocol=pickle.HIGHEST_PROTOCOL,
)
if sharedmtimes:
sharedmtimes.close()
try:
os.unlink(os.path.join(self.data_dir, "mtimes.db"))
except Exception:
pass
sharedmtimes = shelve.open(
os.path.join(self.data_dir, "mtimes.db"),
flag="n",
protocol=pickle.HIGHEST_PROTOCOL,
)
if bsharedmtimes:
bsharedmtimes.close()
try:
os.unlink(os.path.join(self.data_dir, "buddymtimes.db"))
except Exception:
pass
bsharedmtimes = shelve.open(
os.path.join(self.data_dir, "buddymtimes.db"),
flag="n",
protocol=pickle.HIGHEST_PROTOCOL,
)
except Exception as error:
log.addwarning(_("Error while writing database files: %s") % error)
return None
return (
sharedfiles,
bsharedfiles,
sharedfilesstreams,
bsharedfilesstreams,
wordindex,
bwordindex,
fileindex,
bfileindex,
sharedmtimes,
bsharedmtimes,
)
|
def clearShares(
self,
sharedfiles,
bsharedfiles,
sharedfilesstreams,
bsharedfilesstreams,
wordindex,
bwordindex,
fileindex,
bfileindex,
sharedmtimes,
bsharedmtimes,
):
try:
if sharedfiles:
sharedfiles.close()
try:
os.unlink(os.path.join(self.data_dir, "files.db"))
except Exception:
pass
sharedfiles = shelve.open(os.path.join(self.data_dir, "files.db"), flag="n")
if bsharedfiles:
bsharedfiles.close()
try:
os.unlink(os.path.join(self.data_dir, "buddyfiles.db"))
except Exception:
pass
bsharedfiles = shelve.open(
os.path.join(self.data_dir, "buddyfiles.db"), flag="n"
)
if sharedfilesstreams:
sharedfilesstreams.close()
try:
os.unlink(os.path.join(self.data_dir, "streams.db"))
except Exception:
pass
sharedfilesstreams = shelve.open(
os.path.join(self.data_dir, "streams.db"), flag="n"
)
if bsharedfilesstreams:
bsharedfilesstreams.close()
try:
os.unlink(os.path.join(self.data_dir, "buddystreams.db"))
except Exception:
pass
bsharedfilesstreams = shelve.open(
os.path.join(self.data_dir, "buddystreams.db"), flag="n"
)
if wordindex:
wordindex.close()
try:
os.unlink(os.path.join(self.data_dir, "wordindex.db"))
except Exception:
pass
wordindex = shelve.open(os.path.join(self.data_dir, "wordindex.db"), flag="n")
if bwordindex:
bwordindex.close()
try:
os.unlink(os.path.join(self.data_dir, "buddywordindex.db"))
except Exception:
pass
bwordindex = shelve.open(
os.path.join(self.data_dir, "buddywordindex.db"), flag="n"
)
if fileindex:
fileindex.close()
try:
os.unlink(os.path.join(self.data_dir, "fileindex.db"))
except Exception:
pass
fileindex = shelve.open(os.path.join(self.data_dir, "fileindex.db"), flag="n")
if bfileindex:
bfileindex.close()
try:
os.unlink(os.path.join(self.data_dir, "buddyfileindex.db"))
except Exception:
pass
bfileindex = shelve.open(
os.path.join(self.data_dir, "buddyfileindex.db"), flag="n"
)
if sharedmtimes:
sharedmtimes.close()
try:
os.unlink(os.path.join(self.data_dir, "mtimes.db"))
except Exception:
pass
sharedmtimes = shelve.open(os.path.join(self.data_dir, "mtimes.db"), flag="n")
if bsharedmtimes:
bsharedmtimes.close()
try:
os.unlink(os.path.join(self.data_dir, "buddymtimes.db"))
except Exception:
pass
bsharedmtimes = shelve.open(
os.path.join(self.data_dir, "buddymtimes.db"), flag="n"
)
except Exception as error:
log.addwarning(_("Error while writing database files: %s") % error)
return None
return (
sharedfiles,
bsharedfiles,
sharedfilesstreams,
bsharedfilesstreams,
wordindex,
bwordindex,
fileindex,
bfileindex,
sharedmtimes,
bsharedmtimes,
)
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/267
|
Thu 07:58 Nicotine+ supports a country code blocker. This requires a (GPL'ed) library called GeoIP. You can find it here: C library: https://github.com/maxmind/geoip-api-c Python
bindings: https://github.com/maxmind/geoip-api-python (the python bindings require the C library)
Traceback (most recent call last):
File "/usr/local/bin/nicotine", line 255, in <module>
run()
File "/usr/local/bin/nicotine", line 231, in run
app = frame.MainApp(data_dir, config, plugins, trayicon, hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 3595, in __init__
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 273, in __init__
config
File "/usr/local/lib/python3.7/site-packages/pynicotine/pynicotine.py", line 130, in __init__
self.shares = Shares(self)
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 75, in __init__
self.CompressShares("normal")
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 201, in CompressShares
m.makeNetworkMessage(nozlib=0, rebuild=True)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1709, in makeNetworkMessage
msg = msg + self.packObject(len(list(self.list.keys())))
File "/usr/local/lib/python3.7/_collections_abc.py", line 720, in __iter__
yield from self._mapping
File "/usr/local/lib/python3.7/shelve.py", line 95, in __iter__
for k in self.dict.keys():
SystemError: Negative size passed to PyBytes_FromStringAndSize
|
SystemError
|
def writeDownloadQueue(self):
self.config_lock.acquire()
realfile = os.path.join(self.data_dir, "transfers.pickle")
tmpfile = realfile + ".tmp"
backupfile = realfile + " .backup"
try:
handle = open(tmpfile, "wb")
except Exception as inst:
log.addwarning(
_("Something went wrong while opening your transfer list: %(error)s")
% {"error": str(inst)}
)
else:
try:
pickle.dump(
self.sections["transfers"]["downloads"],
handle,
protocol=pickle.HIGHEST_PROTOCOL,
)
handle.close()
try:
# Please let it be atomic...
os.rename(tmpfile, realfile)
except Exception as inst: # noqa: F841
# ...ugh. Okay, how about...
try:
os.unlink(backupfile)
except Exception:
pass
os.rename(realfile, backupfile)
os.rename(tmpfile, realfile)
except Exception as inst:
log.addwarning(
_("Something went wrong while writing your transfer list: %(error)s")
% {"error": str(inst)}
)
finally:
try:
handle.close()
except Exception:
pass
self.config_lock.release()
|
def writeDownloadQueue(self):
self.config_lock.acquire()
realfile = os.path.join(self.data_dir, "transfers.pickle")
tmpfile = realfile + ".tmp"
backupfile = realfile + " .backup"
try:
handle = open(tmpfile, "wb")
except Exception as inst:
log.addwarning(
_("Something went wrong while opening your transfer list: %(error)s")
% {"error": str(inst)}
)
else:
try:
pickle.dump(self.sections["transfers"]["downloads"], handle)
handle.close()
try:
# Please let it be atomic...
os.rename(tmpfile, realfile)
except Exception as inst: # noqa: F841
# ...ugh. Okay, how about...
try:
os.unlink(backupfile)
except Exception:
pass
os.rename(realfile, backupfile)
os.rename(tmpfile, realfile)
except Exception as inst:
log.addwarning(
_("Something went wrong while writing your transfer list: %(error)s")
% {"error": str(inst)}
)
finally:
try:
handle.close()
except Exception:
pass
self.config_lock.release()
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/267
|
Thu 07:58 Nicotine+ supports a country code blocker. This requires a (GPL'ed) library called GeoIP. You can find it here: C library: https://github.com/maxmind/geoip-api-c Python
bindings: https://github.com/maxmind/geoip-api-python (the python bindings require the C library)
Traceback (most recent call last):
File "/usr/local/bin/nicotine", line 255, in <module>
run()
File "/usr/local/bin/nicotine", line 231, in run
app = frame.MainApp(data_dir, config, plugins, trayicon, hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 3595, in __init__
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 273, in __init__
config
File "/usr/local/lib/python3.7/site-packages/pynicotine/pynicotine.py", line 130, in __init__
self.shares = Shares(self)
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 75, in __init__
self.CompressShares("normal")
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 201, in CompressShares
m.makeNetworkMessage(nozlib=0, rebuild=True)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1709, in makeNetworkMessage
msg = msg + self.packObject(len(list(self.list.keys())))
File "/usr/local/lib/python3.7/_collections_abc.py", line 720, in __iter__
yield from self._mapping
File "/usr/local/lib/python3.7/shelve.py", line 95, in __iter__
for k in self.dict.keys():
SystemError: Negative size passed to PyBytes_FromStringAndSize
|
SystemError
|
def _storeObjects(self, storable_objects):
for source, destination, filename in storable_objects:
self.sections["transfers"][destination].close()
self.sections["transfers"][destination] = shelve.open(
os.path.join(self.data_dir, filename),
flag="n",
protocol=pickle.HIGHEST_PROTOCOL,
)
for key, value in source.items():
self.sections["transfers"][destination][key] = value
|
def _storeObjects(self, storable_objects):
for source, destination, filename in storable_objects:
self.sections["transfers"][destination].close()
self.sections["transfers"][destination] = shelve.open(
os.path.join(self.data_dir, filename), flag="n"
)
for key, value in source.items():
self.sections["transfers"][destination][key] = value
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/267
|
Thu 07:58 Nicotine+ supports a country code blocker. This requires a (GPL'ed) library called GeoIP. You can find it here: C library: https://github.com/maxmind/geoip-api-c Python
bindings: https://github.com/maxmind/geoip-api-python (the python bindings require the C library)
Traceback (most recent call last):
File "/usr/local/bin/nicotine", line 255, in <module>
run()
File "/usr/local/bin/nicotine", line 231, in run
app = frame.MainApp(data_dir, config, plugins, trayicon, hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 3595, in __init__
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 273, in __init__
config
File "/usr/local/lib/python3.7/site-packages/pynicotine/pynicotine.py", line 130, in __init__
self.shares = Shares(self)
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 75, in __init__
self.CompressShares("normal")
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 201, in CompressShares
m.makeNetworkMessage(nozlib=0, rebuild=True)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1709, in makeNetworkMessage
msg = msg + self.packObject(len(list(self.list.keys())))
File "/usr/local/lib/python3.7/_collections_abc.py", line 720, in __iter__
yield from self._mapping
File "/usr/local/lib/python3.7/shelve.py", line 95, in __iter__
for k in self.dict.keys():
SystemError: Negative size passed to PyBytes_FromStringAndSize
|
SystemError
|
def writeAliases(self):
self.config_lock.acquire()
f = open(self.filename + ".alias", "wb")
pickle.dump(self.aliases, f, protocol=pickle.HIGHEST_PROTOCOL)
f.close()
self.config_lock.release()
|
def writeAliases(self):
self.config_lock.acquire()
f = open(self.filename + ".alias", "wb")
pickle.dump(self.aliases, f, 1)
f.close()
self.config_lock.release()
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/267
|
Thu 07:58 Nicotine+ supports a country code blocker. This requires a (GPL'ed) library called GeoIP. You can find it here: C library: https://github.com/maxmind/geoip-api-c Python
bindings: https://github.com/maxmind/geoip-api-python (the python bindings require the C library)
Traceback (most recent call last):
File "/usr/local/bin/nicotine", line 255, in <module>
run()
File "/usr/local/bin/nicotine", line 231, in run
app = frame.MainApp(data_dir, config, plugins, trayicon, hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 3595, in __init__
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 273, in __init__
config
File "/usr/local/lib/python3.7/site-packages/pynicotine/pynicotine.py", line 130, in __init__
self.shares = Shares(self)
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 75, in __init__
self.CompressShares("normal")
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 201, in CompressShares
m.makeNetworkMessage(nozlib=0, rebuild=True)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1709, in makeNetworkMessage
msg = msg + self.packObject(len(list(self.list.keys())))
File "/usr/local/lib/python3.7/_collections_abc.py", line 720, in __iter__
yield from self._mapping
File "/usr/local/lib/python3.7/shelve.py", line 95, in __iter__
for k in self.dict.keys():
SystemError: Negative size passed to PyBytes_FromStringAndSize
|
SystemError
|
def CountUsers(self):
numusers = len(self.users)
if numusers > 1:
self.LabelPeople.show()
self.LabelPeople.set_text(_("%i people in room") % numusers)
elif numusers == 1:
self.LabelPeople.show()
self.LabelPeople.set_text(_("You are alone"))
else:
self.LabelPeople.hide()
if self.room in self.roomsctrl.rooms:
iter = self.roomsctrl.roomsmodel.get_iter_first()
while iter:
if self.roomsctrl.roomsmodel.get_value(iter, 0) == self.room:
self.roomsctrl.roomsmodel.set(iter, 1, numusers)
break
iter = self.roomsctrl.roomsmodel.iter_next(iter)
else:
self.roomsctrl.roomsmodel.append([self.room, numusers, 0])
self.roomsctrl.rooms.append(self.room)
|
def CountUsers(self):
numusers = len(list(self.users.keys()))
if numusers > 1:
self.LabelPeople.show()
self.LabelPeople.set_text(_("%i people in room") % numusers)
elif numusers == 1:
self.LabelPeople.show()
self.LabelPeople.set_text(_("You are alone"))
else:
self.LabelPeople.hide()
if self.room in self.roomsctrl.rooms:
iter = self.roomsctrl.roomsmodel.get_iter_first()
while iter:
if self.roomsctrl.roomsmodel.get_value(iter, 0) == self.room:
self.roomsctrl.roomsmodel.set(iter, 1, numusers)
break
iter = self.roomsctrl.roomsmodel.iter_next(iter)
else:
self.roomsctrl.roomsmodel.append([self.room, numusers, 0])
self.roomsctrl.rooms.append(self.room)
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/267
|
Thu 07:58 Nicotine+ supports a country code blocker. This requires a (GPL'ed) library called GeoIP. You can find it here: C library: https://github.com/maxmind/geoip-api-c Python
bindings: https://github.com/maxmind/geoip-api-python (the python bindings require the C library)
Traceback (most recent call last):
File "/usr/local/bin/nicotine", line 255, in <module>
run()
File "/usr/local/bin/nicotine", line 231, in run
app = frame.MainApp(data_dir, config, plugins, trayicon, hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 3595, in __init__
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 273, in __init__
config
File "/usr/local/lib/python3.7/site-packages/pynicotine/pynicotine.py", line 130, in __init__
self.shares = Shares(self)
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 75, in __init__
self.CompressShares("normal")
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 201, in CompressShares
m.makeNetworkMessage(nozlib=0, rebuild=True)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1709, in makeNetworkMessage
msg = msg + self.packObject(len(list(self.list.keys())))
File "/usr/local/lib/python3.7/_collections_abc.py", line 720, in __iter__
yield from self._mapping
File "/usr/local/lib/python3.7/shelve.py", line 95, in __iter__
for k in self.dict.keys():
SystemError: Negative size passed to PyBytes_FromStringAndSize
|
SystemError
|
def __init__(self, frame, message="", data=None, modal=True, Search=True):
gtk.Dialog.__init__(self)
self.connect("destroy", self.quit)
self.connect("delete-event", self.quit)
self.nicotine = frame
if modal:
self.set_modal(True)
self.Search = Search
self.box = gtk.VBox(spacing=10)
self.box.set_border_width(10)
self.box.show()
self.vbox.pack_start(self.box, False, False, 0)
if message:
label = gtk.Label()
label.set_markup(message)
label.set_line_wrap(False)
self.box.pack_start(label, False, False, 0)
label.show()
label.set_alignment(0, 0.5)
self.current = 0
self.data = data
hbox2 = gtk.HBox(spacing=5)
hbox2.show()
self.UF = gtk.Frame()
self.UF.show()
self.UF.set_shadow_type(gtk.ShadowType.ETCHED_IN)
self.box.pack_start(self.UF, False, False, 0)
vbox3 = gtk.VBox(spacing=5)
vbox3.set_border_width(5)
vbox3.show()
self.UF.add(vbox3)
self.UsernameLabel, self.Username = self.MakeLabelStaticEntry(
hbox2, "<b>%s:</b>" % _("Username"), "", expand=False
)
self.BrowseUser = self.nicotine.CreateIconButton(
gtk.STOCK_HARDDISK, "stock", self.OnBrowseUser, _("Browse")
)
hbox2.pack_start(self.BrowseUser, False, False, 0)
self.PositionLabel, self.Position = self.MakeLabelStaticEntry(
hbox2, _("<b>List Position:</b>"), "", expand=False, width=7, xalign=1
)
vbox3.pack_start(hbox2, False, False, 0)
hbox3 = gtk.HBox(spacing=5)
hbox3.show()
vbox3.pack_start(hbox3, False, False, 0)
self.FilenameLabel, self.Filename = self.MakeLabelStaticEntry(
hbox3, _("<b>File Name:</b>"), "", fill=True
)
hbox5 = gtk.HBox(spacing=5)
hbox5.show()
vbox3.pack_start(hbox5, False, False, 0)
self.DirectoryLabel, self.Directory = self.MakeLabelStaticEntry(
hbox5, _("<b>Directory:</b>"), "", fill=True
)
self.Media = gtk.Frame()
self.Media.show()
self.Media.set_shadow_type(gtk.ShadowType.ETCHED_IN)
hbox6 = gtk.HBox(spacing=5, homogeneous=False)
hbox6.set_border_width(5)
hbox6.show()
self.SizeLabel, self.Size = self.MakeLabelStaticEntry(
hbox6, _("<b>File Size:</b>"), "", expand=False, width=11, xalign=1
)
self.LengthLabel, self.Length = self.MakeLabelStaticEntry(
hbox6, _("<b>Length:</b>"), "", expand=False, width=7, xalign=0.5
)
self.BitrateLabel, self.Bitrate = self.MakeLabelStaticEntry(
hbox6, _("<b>Bitrate:</b>"), "", expand=False, width=12, xalign=0.5
)
self.Media.add(hbox6)
self.box.pack_start(self.Media, False, False, 0)
hbox7 = gtk.HBox(spacing=5, homogeneous=False)
hbox7.show()
self.box.pack_start(hbox7, False, False, 0)
self.ImmediateLabel, self.Immediate = self.MakeLabelStaticEntry(
hbox7, _("<b>Immediate Downloads:</b>"), "", expand=False, width=6, xalign=0.5
)
self.QueueLabel, self.Queue = self.MakeLabelStaticEntry(
hbox7, _("<b>Queue:</b>"), "", expand=False, width=6, xalign=1
)
hbox4 = gtk.HBox(spacing=5, homogeneous=False)
hbox4.show()
self.box.pack_start(hbox4, False, False, 0)
self.SpeedLabel, self.Speed = self.MakeLabelStaticEntry(
hbox4, _("<b>Last Speed:</b>"), "", expand=False, width=11, xalign=1
)
self.Country = gtk.Label()
self.Country.hide()
hbox4.pack_start(self.Country, False, False, 0)
self.buttonbox = gtk.HBox(False, 2)
self.buttonbox.show()
self.buttonbox.set_spacing(2)
self.box.pack_start(self.buttonbox, False, False, 0)
# Download Button
self.DownloadItem = self.nicotine.CreateIconButton(
gtk.STOCK_GO_DOWN, "stock", self.OnDownloadItem, _("Download")
)
self.buttonbox.pack_start(self.DownloadItem, False, False, 0)
# Download All Button
self.DownloadAll = self.nicotine.CreateIconButton(
gtk.STOCK_GO_DOWN, "stock", self.OnDownloadAll, _("Download All")
)
self.buttonbox.pack_start(self.DownloadAll, False, False, 0)
self.Selected = self.MakeLabel(
self.buttonbox,
_("<b>%s</b> File(s) Selected") % len(self.data),
expand=False,
xalign=1,
)
self.Previous = self.nicotine.CreateIconButton(
gtk.STOCK_GO_BACK, "stock", self.OnPrevious, _("Previous")
)
self.Next = self.nicotine.CreateIconButton(
gtk.STOCK_GO_FORWARD, "stock", self.OnNext, _("Next")
)
self.buttonbox.pack_end(self.Next, False, False, 0)
self.buttonbox.pack_end(self.Previous, False, False, 0)
button = self.nicotine.CreateIconButton(
gtk.STOCK_CLOSE, "stock", self.click, _("Close")
)
button.props.can_default = True
self.action_area.pack_start(button, False, False, 0)
button.grab_default()
self.ret = None
self.Display(self.current)
|
def __init__(self, frame, message="", data=None, modal=True, Search=True):
gtk.Dialog.__init__(self)
self.connect("destroy", self.quit)
self.connect("delete-event", self.quit)
self.nicotine = frame
if modal:
self.set_modal(True)
self.Search = Search
self.box = gtk.VBox(spacing=10)
self.box.set_border_width(10)
self.box.show()
self.vbox.pack_start(self.box, False, False, 0)
if message:
label = gtk.Label()
label.set_markup(message)
label.set_line_wrap(False)
self.box.pack_start(label, False, False, 0)
label.show()
label.set_alignment(0, 0.5)
self.current = 0
self.data = data
hbox2 = gtk.HBox(spacing=5)
hbox2.show()
self.UF = gtk.Frame()
self.UF.show()
self.UF.set_shadow_type(gtk.ShadowType.ETCHED_IN)
self.box.pack_start(self.UF, False, False, 0)
vbox3 = gtk.VBox(spacing=5)
vbox3.set_border_width(5)
vbox3.show()
self.UF.add(vbox3)
self.UsernameLabel, self.Username = self.MakeLabelStaticEntry(
hbox2, "<b>%s:</b>" % _("Username"), "", expand=False
)
self.BrowseUser = self.nicotine.CreateIconButton(
gtk.STOCK_HARDDISK, "stock", self.OnBrowseUser, _("Browse")
)
hbox2.pack_start(self.BrowseUser, False, False, 0)
self.PositionLabel, self.Position = self.MakeLabelStaticEntry(
hbox2, _("<b>List Position:</b>"), "", expand=False, width=7, xalign=1
)
vbox3.pack_start(hbox2, False, False, 0)
hbox3 = gtk.HBox(spacing=5)
hbox3.show()
vbox3.pack_start(hbox3, False, False, 0)
self.FilenameLabel, self.Filename = self.MakeLabelStaticEntry(
hbox3, _("<b>File Name:</b>"), "", fill=True
)
hbox5 = gtk.HBox(spacing=5)
hbox5.show()
vbox3.pack_start(hbox5, False, False, 0)
self.DirectoryLabel, self.Directory = self.MakeLabelStaticEntry(
hbox5, _("<b>Directory:</b>"), "", fill=True
)
self.Media = gtk.Frame()
self.Media.show()
self.Media.set_shadow_type(gtk.ShadowType.ETCHED_IN)
hbox6 = gtk.HBox(spacing=5, homogeneous=False)
hbox6.set_border_width(5)
hbox6.show()
self.SizeLabel, self.Size = self.MakeLabelStaticEntry(
hbox6, _("<b>File Size:</b>"), "", expand=False, width=11, xalign=1
)
self.LengthLabel, self.Length = self.MakeLabelStaticEntry(
hbox6, _("<b>Length:</b>"), "", expand=False, width=7, xalign=0.5
)
self.BitrateLabel, self.Bitrate = self.MakeLabelStaticEntry(
hbox6, _("<b>Bitrate:</b>"), "", expand=False, width=12, xalign=0.5
)
self.Media.add(hbox6)
self.box.pack_start(self.Media, False, False, 0)
hbox7 = gtk.HBox(spacing=5, homogeneous=False)
hbox7.show()
self.box.pack_start(hbox7, False, False, 0)
self.ImmediateLabel, self.Immediate = self.MakeLabelStaticEntry(
hbox7, _("<b>Immediate Downloads:</b>"), "", expand=False, width=6, xalign=0.5
)
self.QueueLabel, self.Queue = self.MakeLabelStaticEntry(
hbox7, _("<b>Queue:</b>"), "", expand=False, width=6, xalign=1
)
hbox4 = gtk.HBox(spacing=5, homogeneous=False)
hbox4.show()
self.box.pack_start(hbox4, False, False, 0)
self.SpeedLabel, self.Speed = self.MakeLabelStaticEntry(
hbox4, _("<b>Last Speed:</b>"), "", expand=False, width=11, xalign=1
)
self.Country = gtk.Label()
self.Country.hide()
hbox4.pack_start(self.Country, False, False, 0)
self.buttonbox = gtk.HBox(False, 2)
self.buttonbox.show()
self.buttonbox.set_spacing(2)
self.box.pack_start(self.buttonbox, False, False, 0)
# Download Button
self.DownloadItem = self.nicotine.CreateIconButton(
gtk.STOCK_GO_DOWN, "stock", self.OnDownloadItem, _("Download")
)
self.buttonbox.pack_start(self.DownloadItem, False, False, 0)
# Download All Button
self.DownloadAll = self.nicotine.CreateIconButton(
gtk.STOCK_GO_DOWN, "stock", self.OnDownloadAll, _("Download All")
)
self.buttonbox.pack_start(self.DownloadAll, False, False, 0)
self.Selected = self.MakeLabel(
self.buttonbox,
_("<b>%s</b> File(s) Selected") % len(list(self.data.keys())),
expand=False,
xalign=1,
)
self.Previous = self.nicotine.CreateIconButton(
gtk.STOCK_GO_BACK, "stock", self.OnPrevious, _("Previous")
)
self.Next = self.nicotine.CreateIconButton(
gtk.STOCK_GO_FORWARD, "stock", self.OnNext, _("Next")
)
self.buttonbox.pack_end(self.Next, False, False, 0)
self.buttonbox.pack_end(self.Previous, False, False, 0)
button = self.nicotine.CreateIconButton(
gtk.STOCK_CLOSE, "stock", self.click, _("Close")
)
button.props.can_default = True
self.action_area.pack_start(button, False, False, 0)
button.grab_default()
self.ret = None
self.Display(self.current)
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/267
|
Thu 07:58 Nicotine+ supports a country code blocker. This requires a (GPL'ed) library called GeoIP. You can find it here: C library: https://github.com/maxmind/geoip-api-c Python
bindings: https://github.com/maxmind/geoip-api-python (the python bindings require the C library)
Traceback (most recent call last):
File "/usr/local/bin/nicotine", line 255, in <module>
run()
File "/usr/local/bin/nicotine", line 231, in run
app = frame.MainApp(data_dir, config, plugins, trayicon, hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 3595, in __init__
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 273, in __init__
config
File "/usr/local/lib/python3.7/site-packages/pynicotine/pynicotine.py", line 130, in __init__
self.shares = Shares(self)
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 75, in __init__
self.CompressShares("normal")
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 201, in CompressShares
m.makeNetworkMessage(nozlib=0, rebuild=True)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1709, in makeNetworkMessage
msg = msg + self.packObject(len(list(self.list.keys())))
File "/usr/local/lib/python3.7/_collections_abc.py", line 720, in __iter__
yield from self._mapping
File "/usr/local/lib/python3.7/shelve.py", line 95, in __iter__
for k in self.dict.keys():
SystemError: Negative size passed to PyBytes_FromStringAndSize
|
SystemError
|
def OnPrevious(self, widget):
if len(self.data) > 1:
_list = list(self.data.keys())
if self.current not in _list:
ix -= 1 # noqa: F821
else:
ix = _list.index(self.current)
ix -= 1
if ix < 0:
ix = -1
elif ix >= len(_list):
ix = 0
if ix is not None:
self.current = _list[ix]
if self.current is None:
return
self.Display(self.current)
|
def OnPrevious(self, widget):
if len(list(self.data.keys())) > 1:
_list = list(self.data.keys())
if self.current not in _list:
ix -= 1 # noqa: F821
else:
ix = _list.index(self.current)
ix -= 1
if ix < 0:
ix = -1
elif ix >= len(_list):
ix = 0
if ix is not None:
self.current = _list[ix]
if self.current is None:
return
self.Display(self.current)
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/267
|
Thu 07:58 Nicotine+ supports a country code blocker. This requires a (GPL'ed) library called GeoIP. You can find it here: C library: https://github.com/maxmind/geoip-api-c Python
bindings: https://github.com/maxmind/geoip-api-python (the python bindings require the C library)
Traceback (most recent call last):
File "/usr/local/bin/nicotine", line 255, in <module>
run()
File "/usr/local/bin/nicotine", line 231, in run
app = frame.MainApp(data_dir, config, plugins, trayicon, hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 3595, in __init__
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 273, in __init__
config
File "/usr/local/lib/python3.7/site-packages/pynicotine/pynicotine.py", line 130, in __init__
self.shares = Shares(self)
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 75, in __init__
self.CompressShares("normal")
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 201, in CompressShares
m.makeNetworkMessage(nozlib=0, rebuild=True)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1709, in makeNetworkMessage
msg = msg + self.packObject(len(list(self.list.keys())))
File "/usr/local/lib/python3.7/_collections_abc.py", line 720, in __iter__
yield from self._mapping
File "/usr/local/lib/python3.7/shelve.py", line 95, in __iter__
for k in self.dict.keys():
SystemError: Negative size passed to PyBytes_FromStringAndSize
|
SystemError
|
def OnNext(self, widget):
if len(self.data) > 1:
_list = list(self.data.keys())
if self.current not in _list:
ix += 1 # noqa: F821
else:
ix = _list.index(self.current)
ix += 1
if ix < 0:
ix = -1
elif ix >= len(_list):
ix = 0
if ix is not None:
self.current = _list[ix]
if self.current is None:
return
self.Display(self.current)
|
def OnNext(self, widget):
if len(list(self.data.keys())) > 1:
_list = list(self.data.keys())
if self.current not in _list:
ix += 1 # noqa: F821
else:
ix = _list.index(self.current)
ix += 1
if ix < 0:
ix = -1
elif ix >= len(_list):
ix = 0
if ix is not None:
self.current = _list[ix]
if self.current is None:
return
self.Display(self.current)
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/267
|
Thu 07:58 Nicotine+ supports a country code blocker. This requires a (GPL'ed) library called GeoIP. You can find it here: C library: https://github.com/maxmind/geoip-api-c Python
bindings: https://github.com/maxmind/geoip-api-python (the python bindings require the C library)
Traceback (most recent call last):
File "/usr/local/bin/nicotine", line 255, in <module>
run()
File "/usr/local/bin/nicotine", line 231, in run
app = frame.MainApp(data_dir, config, plugins, trayicon, hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 3595, in __init__
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 273, in __init__
config
File "/usr/local/lib/python3.7/site-packages/pynicotine/pynicotine.py", line 130, in __init__
self.shares = Shares(self)
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 75, in __init__
self.CompressShares("normal")
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 201, in CompressShares
m.makeNetworkMessage(nozlib=0, rebuild=True)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1709, in makeNetworkMessage
msg = msg + self.packObject(len(list(self.list.keys())))
File "/usr/local/lib/python3.7/_collections_abc.py", line 720, in __iter__
yield from self._mapping
File "/usr/local/lib/python3.7/shelve.py", line 95, in __iter__
for k in self.dict.keys():
SystemError: Negative size passed to PyBytes_FromStringAndSize
|
SystemError
|
def Display(self, item):
if item not in self.data:
return
if not self.Search:
self.Immediate.hide()
self.Position.hide()
self.Country.hide()
self.Queue.hide()
self.Immediate.hide()
self.ImmediateLabel.hide()
self.PositionLabel.hide()
self.QueueLabel.hide()
self.ImmediateLabel.hide()
self.DownloadItem.hide()
self.DownloadAll.hide()
else:
self.Immediate.show()
self.Position.show()
self.Country.show()
self.Queue.show()
self.Immediate.show()
self.ImmediateLabel.show()
self.PositionLabel.show()
self.QueueLabel.show()
self.ImmediateLabel.show()
self.DownloadItem.show()
self.DownloadAll.show()
self.current = item
data = self.data[self.current]
More = False
if len(self.data) > 1:
More = True
self.Next.set_sensitive(More)
self.Previous.set_sensitive(More)
self.DownloadAll.set_sensitive(More)
self.Username.set_text(data["user"])
self.Filename.set_text(data["filename"])
self.Directory.set_text(data["directory"])
self.Size.set_text(str(data["size"]))
self.Speed.set_text(data["speed"])
self.Position.set_text(str(data["position"]))
if data["bitrate"] not in ("", None):
self.Bitrate.set_text(data["bitrate"])
else:
self.Bitrate.set_text("")
self.Length.set_text(data["length"])
self.Queue.set_text(data["queue"])
self.Immediate.set_text(str(data["immediate"] == "Y"))
country = data["country"]
if country not in ("", None):
self.Country.set_markup(_("<b>Country Code:</b> ") + country)
self.Country.show()
else:
self.Country.set_text("")
self.Country.hide()
|
def Display(self, item):
if item not in self.data:
return
if not self.Search:
self.Immediate.hide()
self.Position.hide()
self.Country.hide()
self.Queue.hide()
self.Immediate.hide()
self.ImmediateLabel.hide()
self.PositionLabel.hide()
self.QueueLabel.hide()
self.ImmediateLabel.hide()
self.DownloadItem.hide()
self.DownloadAll.hide()
else:
self.Immediate.show()
self.Position.show()
self.Country.show()
self.Queue.show()
self.Immediate.show()
self.ImmediateLabel.show()
self.PositionLabel.show()
self.QueueLabel.show()
self.ImmediateLabel.show()
self.DownloadItem.show()
self.DownloadAll.show()
self.current = item
data = self.data[self.current]
More = False
if len(list(self.data.keys())) > 1:
More = True
self.Next.set_sensitive(More)
self.Previous.set_sensitive(More)
self.DownloadAll.set_sensitive(More)
self.Username.set_text(data["user"])
self.Filename.set_text(data["filename"])
self.Directory.set_text(data["directory"])
self.Size.set_text(str(data["size"]))
self.Speed.set_text(data["speed"])
self.Position.set_text(str(data["position"]))
if data["bitrate"] not in ("", None):
self.Bitrate.set_text(data["bitrate"])
else:
self.Bitrate.set_text("")
self.Length.set_text(data["length"])
self.Queue.set_text(data["queue"])
self.Immediate.set_text(str(data["immediate"] == "Y"))
country = data["country"]
if country not in ("", None):
self.Country.set_markup(_("<b>Country Code:</b> ") + country)
self.Country.show()
else:
self.Country.set_text("")
self.Country.hide()
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/267
|
Thu 07:58 Nicotine+ supports a country code blocker. This requires a (GPL'ed) library called GeoIP. You can find it here: C library: https://github.com/maxmind/geoip-api-c Python
bindings: https://github.com/maxmind/geoip-api-python (the python bindings require the C library)
Traceback (most recent call last):
File "/usr/local/bin/nicotine", line 255, in <module>
run()
File "/usr/local/bin/nicotine", line 231, in run
app = frame.MainApp(data_dir, config, plugins, trayicon, hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 3595, in __init__
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 273, in __init__
config
File "/usr/local/lib/python3.7/site-packages/pynicotine/pynicotine.py", line 130, in __init__
self.shares = Shares(self)
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 75, in __init__
self.CompressShares("normal")
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 201, in CompressShares
m.makeNetworkMessage(nozlib=0, rebuild=True)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1709, in makeNetworkMessage
msg = msg + self.packObject(len(list(self.list.keys())))
File "/usr/local/lib/python3.7/_collections_abc.py", line 720, in __iter__
yield from self._mapping
File "/usr/local/lib/python3.7/shelve.py", line 95, in __iter__
for k in self.dict.keys():
SystemError: Negative size passed to PyBytes_FromStringAndSize
|
SystemError
|
def UpdateDownloadFilters(self):
proccessedfilters = []
outfilter = "(\\\\("
failed = {}
df = self.np.config.sections["transfers"]["downloadfilters"]
df.sort()
# Get Filters from config file and check their escaped status
# Test if they are valid regular expressions and save error messages
for item in df:
filter, escaped = item
if escaped:
dfilter = re.escape(filter)
dfilter = dfilter.replace("\\*", ".*")
else:
dfilter = filter
try:
re.compile("(" + dfilter + ")")
outfilter += dfilter
proccessedfilters.append(dfilter)
except Exception as e:
failed[dfilter] = e
proccessedfilters.append(dfilter)
if item is not df[-1]:
outfilter += "|"
# Crop trailing pipes
while outfilter[-1] == "|":
outfilter = outfilter[:-1]
outfilter += ")$)"
try:
re.compile(outfilter)
self.np.config.sections["transfers"]["downloadregexp"] = outfilter
# Send error messages for each failed filter to log window
if len(failed) >= 1:
errors = ""
for filter, error in list(failed.items()):
errors += "Filter: %s Error: %s " % (filter, error)
error = _(
"Error: %(num)d Download filters failed! %(error)s "
% {"num": len(failed), "error": errors}
)
self.logMessage(error)
except Exception as e:
# Strange that individual filters _and_ the composite filter both fail
self.logMessage(
_("Error: Download Filter failed! Verify your filters. Reason: %s" % e)
)
self.np.config.sections["transfers"]["downloadregexp"] = ""
|
def UpdateDownloadFilters(self):
proccessedfilters = []
outfilter = "(\\\\("
failed = {}
df = self.np.config.sections["transfers"]["downloadfilters"]
df.sort()
# Get Filters from config file and check their escaped status
# Test if they are valid regular expressions and save error messages
for item in df:
filter, escaped = item
if escaped:
dfilter = re.escape(filter)
dfilter = dfilter.replace("\\*", ".*")
else:
dfilter = filter
try:
re.compile("(" + dfilter + ")")
outfilter += dfilter
proccessedfilters.append(dfilter)
except Exception as e:
failed[dfilter] = e
proccessedfilters.append(dfilter)
if item is not df[-1]:
outfilter += "|"
# Crop trailing pipes
while outfilter[-1] == "|":
outfilter = outfilter[:-1]
outfilter += ")$)"
try:
re.compile(outfilter)
self.np.config.sections["transfers"]["downloadregexp"] = outfilter
# Send error messages for each failed filter to log window
if len(list(failed.keys())) >= 1:
errors = ""
for filter, error in list(failed.items()):
errors += "Filter: %s Error: %s " % (filter, error)
error = _(
"Error: %(num)d Download filters failed! %(error)s "
% {"num": len(list(failed.keys())), "error": errors}
)
self.logMessage(error)
except Exception as e:
# Strange that individual filters _and_ the composite filter both fail
self.logMessage(
_("Error: Download Filter failed! Verify your filters. Reason: %s" % e)
)
self.np.config.sections["transfers"]["downloadregexp"] = ""
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/267
|
Thu 07:58 Nicotine+ supports a country code blocker. This requires a (GPL'ed) library called GeoIP. You can find it here: C library: https://github.com/maxmind/geoip-api-c Python
bindings: https://github.com/maxmind/geoip-api-python (the python bindings require the C library)
Traceback (most recent call last):
File "/usr/local/bin/nicotine", line 255, in <module>
run()
File "/usr/local/bin/nicotine", line 231, in run
app = frame.MainApp(data_dir, config, plugins, trayicon, hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 3595, in __init__
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 273, in __init__
config
File "/usr/local/lib/python3.7/site-packages/pynicotine/pynicotine.py", line 130, in __init__
self.shares = Shares(self)
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 75, in __init__
self.CompressShares("normal")
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 201, in CompressShares
m.makeNetworkMessage(nozlib=0, rebuild=True)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1709, in makeNetworkMessage
msg = msg + self.packObject(len(list(self.list.keys())))
File "/usr/local/lib/python3.7/_collections_abc.py", line 720, in __iter__
yield from self._mapping
File "/usr/local/lib/python3.7/shelve.py", line 95, in __iter__
for k in self.dict.keys():
SystemError: Negative size passed to PyBytes_FromStringAndSize
|
SystemError
|
def OnVerifyFilter(self, widget):
outfilter = "(\\\\("
df = list(self.filtersiters.keys())
df.sort()
proccessedfilters = []
failed = {}
for filter in df:
iter = self.filtersiters[filter]
dfilter = self.filterlist.get_value(iter, 0)
escaped = self.filterlist.get_value(iter, 1)
if escaped:
dfilter = re.escape(dfilter)
dfilter = dfilter.replace("\\*", ".*")
try:
re.compile("(" + dfilter + ")")
outfilter += dfilter
proccessedfilters.append(dfilter)
except Exception as e:
failed[dfilter] = e
if filter is not df[-1]:
outfilter += "|"
outfilter += ")$)"
try:
re.compile(outfilter)
except Exception as e:
failed[outfilter] = e
if len(failed) >= 1:
errors = ""
for filter, error in list(failed.items()):
errors += "Filter: %(filter)s Error: %(error)s " % {
"filter": filter,
"error": error,
}
error = _("%(num)d Failed! %(error)s " % {"num": len(failed), "error": errors})
self.VerifiedLabel.set_markup(
'<span color="red" weight="bold">%s</span>' % error
)
else:
self.VerifiedLabel.set_markup("<b>Filters Successful</b>")
|
def OnVerifyFilter(self, widget):
outfilter = "(\\\\("
df = list(self.filtersiters.keys())
df.sort()
proccessedfilters = []
failed = {}
for filter in df:
iter = self.filtersiters[filter]
dfilter = self.filterlist.get_value(iter, 0)
escaped = self.filterlist.get_value(iter, 1)
if escaped:
dfilter = re.escape(dfilter)
dfilter = dfilter.replace("\\*", ".*")
try:
re.compile("(" + dfilter + ")")
outfilter += dfilter
proccessedfilters.append(dfilter)
except Exception as e:
failed[dfilter] = e
if filter is not df[-1]:
outfilter += "|"
outfilter += ")$)"
try:
re.compile(outfilter)
except Exception as e:
failed[outfilter] = e
if len(list(failed.keys())) >= 1:
errors = ""
for filter, error in list(failed.items()):
errors += "Filter: %(filter)s Error: %(error)s " % {
"filter": filter,
"error": error,
}
error = _(
"%(num)d Failed! %(error)s "
% {"num": len(list(failed.keys())), "error": errors}
)
self.VerifiedLabel.set_markup(
'<span color="red" weight="bold">%s</span>' % error
)
else:
self.VerifiedLabel.set_markup("<b>Filters Successful</b>")
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/267
|
Thu 07:58 Nicotine+ supports a country code blocker. This requires a (GPL'ed) library called GeoIP. You can find it here: C library: https://github.com/maxmind/geoip-api-c Python
bindings: https://github.com/maxmind/geoip-api-python (the python bindings require the C library)
Traceback (most recent call last):
File "/usr/local/bin/nicotine", line 255, in <module>
run()
File "/usr/local/bin/nicotine", line 231, in run
app = frame.MainApp(data_dir, config, plugins, trayicon, hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 3595, in __init__
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 273, in __init__
config
File "/usr/local/lib/python3.7/site-packages/pynicotine/pynicotine.py", line 130, in __init__
self.shares = Shares(self)
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 75, in __init__
self.CompressShares("normal")
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 201, in CompressShares
m.makeNetworkMessage(nozlib=0, rebuild=True)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1709, in makeNetworkMessage
msg = msg + self.packObject(len(list(self.list.keys())))
File "/usr/local/lib/python3.7/_collections_abc.py", line 720, in __iter__
yield from self._mapping
File "/usr/local/lib/python3.7/shelve.py", line 95, in __iter__
for k in self.dict.keys():
SystemError: Negative size passed to PyBytes_FromStringAndSize
|
SystemError
|
def OnSave(self, widget):
sharesdir = os.path.join(self.data_dir, "usershares")
try:
if not os.path.exists(sharesdir):
os.mkdir(sharesdir)
except Exception as msg:
error = _(
"Can't create directory '%(folder)s', reported error: %(error)s"
% {"folder": sharesdir, "error": msg}
)
self.frame.logMessage(error)
try:
import pickle as mypickle
import bz2
sharesfile = bz2.BZ2File(os.path.join(sharesdir, CleanFile(self.user)), "w")
mypickle.dump(self.shares, sharesfile, protocol=mypickle.HIGHEST_PROTOCOL)
sharesfile.close()
except Exception as msg:
error = _(
"Can't save shares, '%(user)s', reported error: %(error)s"
% {"user": self.user, "error": msg}
)
self.frame.logMessage(error)
|
def OnSave(self, widget):
sharesdir = os.path.join(self.data_dir, "usershares")
try:
if not os.path.exists(sharesdir):
os.mkdir(sharesdir)
except Exception as msg:
error = _(
"Can't create directory '%(folder)s', reported error: %(error)s"
% {"folder": sharesdir, "error": msg}
)
self.frame.logMessage(error)
try:
import pickle as mypickle
import bz2
sharesfile = bz2.BZ2File(os.path.join(sharesdir, CleanFile(self.user)), "w")
mypickle.dump(self.shares, sharesfile, mypickle.HIGHEST_PROTOCOL)
sharesfile.close()
except Exception as msg:
error = _(
"Can't save shares, '%(user)s', reported error: %(error)s"
% {"user": self.user, "error": msg}
)
self.frame.logMessage(error)
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/267
|
Thu 07:58 Nicotine+ supports a country code blocker. This requires a (GPL'ed) library called GeoIP. You can find it here: C library: https://github.com/maxmind/geoip-api-c Python
bindings: https://github.com/maxmind/geoip-api-python (the python bindings require the C library)
Traceback (most recent call last):
File "/usr/local/bin/nicotine", line 255, in <module>
run()
File "/usr/local/bin/nicotine", line 231, in run
app = frame.MainApp(data_dir, config, plugins, trayicon, hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 3595, in __init__
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 273, in __init__
config
File "/usr/local/lib/python3.7/site-packages/pynicotine/pynicotine.py", line 130, in __init__
self.shares = Shares(self)
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 75, in __init__
self.CompressShares("normal")
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 201, in CompressShares
m.makeNetworkMessage(nozlib=0, rebuild=True)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1709, in makeNetworkMessage
msg = msg + self.packObject(len(list(self.list.keys())))
File "/usr/local/lib/python3.7/_collections_abc.py", line 720, in __iter__
yield from self._mapping
File "/usr/local/lib/python3.7/shelve.py", line 95, in __iter__
for k in self.dict.keys():
SystemError: Negative size passed to PyBytes_FromStringAndSize
|
SystemError
|
def makeNetworkMessage(self, nozlib=0, rebuild=False):
# Elaborate hack, to save CPU
# Store packed message contents in self.built, and use
# instead of repacking it, unless rebuild is True
if not rebuild and self.built is not None:
return self.built
msg = b""
msg = msg + self.packObject(len(self.list))
for key, value in self.list.items():
msg = msg + self.packObject(key.replace(os.sep, "\\")) + value
if not nozlib:
self.built = zlib.compress(msg)
else:
self.built = msg
return self.built
|
def makeNetworkMessage(self, nozlib=0, rebuild=False):
# Elaborate hack, to save CPU
# Store packed message contents in self.built, and use
# instead of repacking it, unless rebuild is True
if not rebuild and self.built is not None:
return self.built
msg = b""
msg = msg + self.packObject(len(list(self.list.keys())))
for key, value in self.list.items():
msg = msg + self.packObject(key.replace(os.sep, "\\")) + value
if not nozlib:
self.built = zlib.compress(msg)
else:
self.built = msg
return self.built
|
https://github.com/Nicotine-Plus/nicotine-plus/issues/267
|
Thu 07:58 Nicotine+ supports a country code blocker. This requires a (GPL'ed) library called GeoIP. You can find it here: C library: https://github.com/maxmind/geoip-api-c Python
bindings: https://github.com/maxmind/geoip-api-python (the python bindings require the C library)
Traceback (most recent call last):
File "/usr/local/bin/nicotine", line 255, in <module>
run()
File "/usr/local/bin/nicotine", line 231, in run
app = frame.MainApp(data_dir, config, plugins, trayicon, hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 3595, in __init__
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
File "/usr/local/lib/python3.7/site-packages/pynicotine/gtkgui/frame.py", line 273, in __init__
config
File "/usr/local/lib/python3.7/site-packages/pynicotine/pynicotine.py", line 130, in __init__
self.shares = Shares(self)
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 75, in __init__
self.CompressShares("normal")
File "/usr/local/lib/python3.7/site-packages/pynicotine/shares.py", line 201, in CompressShares
m.makeNetworkMessage(nozlib=0, rebuild=True)
File "/usr/local/lib/python3.7/site-packages/pynicotine/slskmessages.py", line 1709, in makeNetworkMessage
msg = msg + self.packObject(len(list(self.list.keys())))
File "/usr/local/lib/python3.7/_collections_abc.py", line 720, in __iter__
yield from self._mapping
File "/usr/local/lib/python3.7/shelve.py", line 95, in __iter__
for k in self.dict.keys():
SystemError: Negative size passed to PyBytes_FromStringAndSize
|
SystemError
|
async def demo(docker):
try:
await docker.images.get("alpine:latest")
except DockerError as e:
if e.status == 404:
await docker.pull("alpine:latest")
else:
print("Error retrieving alpine:latest image.")
return
subscriber = docker.events.subscribe()
config = {
"Cmd": ["tail", "-f", "/var/log/dmesg"],
"Image": "alpine:latest",
"AttachStdin": False,
"AttachStdout": True,
"AttachStderr": True,
"Tty": False,
"OpenStdin": False,
"StdinOnce": False,
}
container = await docker.containers.create_or_replace(config=config, name="testing")
await container.start(config)
print(f"=> created and started container {container._id[:12]}")
while True:
event = await subscriber.get()
if event is None:
break
print(f"event: {event!r}")
# Demonstrate simple event-driven container mgmt.
if event["Actor"]["ID"] == container._id:
if event["Action"] == "start":
await container.stop()
print(f"=> killed {container._id[:12]}")
elif event["Action"] == "stop":
await container.delete(force=True)
print(f"=> deleted {container._id[:12]}")
elif event["Action"] == "destroy":
print("=> done with this container!")
break
|
async def demo(docker):
try:
await docker.images.get("alpine:latest")
except DockerError as e:
if e.status == 404:
await docker.pull("alpine:latest")
else:
print("Error retrieving alpine:latest image.")
return
subscriber = docker.events.subscribe()
config = {
"Cmd": ["tail", "-f", "/var/log/dmesg"],
"Image": "alpine:latest",
"AttachStdin": False,
"AttachStdout": True,
"AttachStderr": True,
"Tty": False,
"OpenStdin": False,
"StdinOnce": False,
}
container = await docker.containers.create_or_replace(config=config, name="testing")
await container.start(config)
print(f"=> created and started container {container._id[:12]}")
while True:
event = await subscriber.get()
print(f"event: {event!r}")
# Demonstrate simple event-driven container mgmt.
if event["Actor"]["ID"] == container._id:
if event["Action"] == "start":
await container.stop()
print(f"=> killed {container._id[:12]}")
elif event["Action"] == "stop":
await container.delete(force=True)
print(f"=> deleted {container._id[:12]}")
elif event["Action"] == "destroy":
print("=> done with this container!")
break
|
https://github.com/aio-libs/aiodocker/issues/34
|
^CTraceback (most recent call last):
File "/tmp/events.py", line 24, in <module>
loop.run_until_complete(demo(docker))
File "/usr/lib/python3.6/asyncio/base_events.py", line 454, in run_until_complete
self.run_forever()
File "/usr/lib/python3.6/asyncio/base_events.py", line 421, in run_forever
self._run_once()
File "/usr/lib/python3.6/asyncio/base_events.py", line 1389, in _run_once
event_list = self._selector.select(timeout)
File "/usr/lib/python3.6/selectors.py", line 445, in select
fd_event_list = self._epoll.poll(timeout, max_ev)
KeyboardInterrupt
Task exception was never retrieved
future: <Task finished coro=<DockerEvents.run() done, defined at /home/cecile/.local/lib/python3.6/site-packages/aiodocker-0.7-py3.6.egg/aiodocker/docker.py:512> exception=ClientConnectorError(111, "Cannot connect to host 127.0.0.1:666 ssl:False [Can not connect to 127.0.0.1:666 [Connect call failed ('127.0.0.1', 666)]]")>
Traceback (most recent call last):
File "/home/cecile/.local/lib/python3.6/site-packages/aiohttp/connector.py", line 651, in _create_direct_connection
local_addr=self._local_addr)
File "/usr/lib/python3.6/asyncio/base_events.py", line 776, in create_connection
raise exceptions[0]
File "/usr/lib/python3.6/asyncio/base_events.py", line 763, in create_connection
yield from self.sock_connect(sock, address)
File "/usr/lib/python3.6/asyncio/selector_events.py", line 451, in sock_connect
return (yield from fut)
File "/usr/lib/python3.6/asyncio/selector_events.py", line 481, in _sock_connect_cb
raise OSError(err, 'Connect call failed %s' % (address,))
ConnectionRefusedError: [Errno 111] Connect call failed ('127.0.0.1', 666)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/cecile/.local/lib/python3.6/site-packages/aiohttp/connector.py", line 374, in connect
proto = yield from self._create_connection(req)
File "/home/cecile/.local/lib/python3.6/site-packages/aiohttp/connector.py", line 628, in _create_connection
_, proto = yield from self._create_direct_connection(req)
File "/home/cecile/.local/lib/python3.6/site-packages/aiohttp/connector.py", line 678, in _create_direct_connection
(req.host, req.port, exc.strerror)) from exc
aiohttp.client_exceptions.ClientConnectorError: [Errno 111] Can not connect to 127.0.0.1:666 [Connect call failed ('127.0.0.1', 666)]
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/cecile/.local/lib/python3.6/site-packages/aiodocker-0.7-py3.6.egg/aiodocker/docker.py", line 525, in run
params=params,
File "/home/cecile/.local/lib/python3.6/site-packages/aiodocker-0.7-py3.6.egg/aiodocker/docker.py", line 121, in _query
data=data, **kwargs)
File "/home/cecile/.local/lib/python3.6/site-packages/aiohttp/client.py", line 621, in __await__
resp = yield from self._coro
File "/home/cecile/.local/lib/python3.6/site-packages/aiohttp/client.py", line 225, in _request
conn = yield from self._connector.connect(req)
File "/home/cecile/.local/lib/python3.6/site-packages/aiohttp/connector.py", line 379, in connect
.format(key, exc.strerror)) from exc
aiohttp.client_exceptions.ClientConnectorError: [Errno 111] Cannot connect to host 127.0.0.1:666 ssl:False [Can not connect to 127.0.0.1:666 [Connect call failed ('127.0.0.1', 666)]]
Exception ignored in: <coroutine object demo at 0x7f9912428f68>
Traceback (most recent call last):
File "/tmp/events.py", line 13, in demo
File "/home/cecile/.local/lib/python3.6/site-packages/aiodocker-0.7-py3.6.egg/aiodocker/channel.py", line 14, in get
File "/usr/lib/python3.6/asyncio/queues.py", line 169, in get
File "/usr/lib/python3.6/asyncio/base_events.py", line 573, in call_soon
File "/usr/lib/python3.6/asyncio/base_events.py", line 357, in _check_closed
RuntimeError: Event loop is closed
|
ConnectionRefusedError
|
def main():
if USING_ARGPARSE:
parser = argparse.ArgumentParser(
description="Create views from nmap and passive databases.",
parents=[DB().argparser],
)
else:
parser = optparse.OptionParser(
description="Create views from nmap and passive databases."
)
parser.parse_args_orig = parser.parse_args
def my_parse_args():
res = parser.parse_args_orig()
res[0].ensure_value("ips", res[1])
return res[0]
parser.parse_args = my_parse_args
parser.add_argument = parser.add_option
fltnmap = db.nmap.flt_empty
fltpass = db.passive.flt_empty
_from = []
parser.add_argument(
"--category",
metavar="CATEGORY",
help="Choose a different category than the default",
)
parser.add_argument(
"--test",
"-t",
action="store_true",
help="Give results in standard output instead of inserting them in database.",
)
parser.add_argument(
"--verbose",
"-v",
action="store_true",
help="For test output, print out formated results.",
)
if not USING_ARGPARSE:
if "nmap" in sys.argv:
for args, kargs in db.nmap.argparser.args:
parser.add_option(*args, **kargs)
elif "passive" in sys.argv:
for args, kargs in db.passive.argparser.args:
parser.add_option(*args, **kargs)
else:
print("ivre db2view: error: invalid subcommand {nmap, passive}.")
exit(-1)
else:
subparsers = parser.add_subparsers(
dest="view_source", help="Accepted values are 'nmap' and 'passive'"
)
subparsers.add_parser("nmap", parents=[db.nmap.argparser])
passparser = subparsers.add_parser("passive", parents=[db.passive.argparser])
passparser.add_argument("ips", nargs="*")
args = parser.parse_args()
if args.category:
db.view.category = args.category
if not args.view_source:
args.view_source = "all"
if args.view_source == "all":
fltnmap = DB().parse_args(args, flt=fltnmap)
fltpass = DB().parse_args(args, flt=fltpass)
_from = [from_nmap(fltnmap), from_passive(fltpass)]
elif args.view_source == "nmap":
fltnmap = db.nmap.parse_args(args, fltnmap)
_from = [from_nmap(fltnmap)]
elif args.view_source == "passive":
fltpass = db.passive.parse_args(args, fltpass)
_from = [from_passive(fltpass)]
if args.test:
def output(x):
print(x)
else:
output = db.view.store_or_merge_host
# Filter by ip for passive
if args.view_source == "passive" and args.ips:
flt = db.passive.flt_empty
for a in args.ips:
if "-" in a:
a = a.split("-", 1)
if a[0].isdigit():
a[0] = int(a[0])
if a[1].isdigit():
a[1] = int(a[1])
flt = db.passive.flt_or(flt, db.passive.searchrange(a[0], a[1]))
elif "/" in a:
flt = db.passive.flt_or(flt, db.passive.searchnet(a))
else:
if a.isdigit():
a = db.passive.ip2internal(int(a))
flt = db.passive.flt_or(flt, db.passive.searchhost(a))
fltpass = db.passive.flt_and(fltpass, flt)
# Output results
itr = to_view(_from)
if not itr:
return
for elt in itr:
output(elt)
|
def main():
if USING_ARGPARSE:
parser = argparse.ArgumentParser(
description="Create views from nmap and passive databases."
)
else:
parser = optparse.OptionParser(
description="Create views from nmap and passive databases."
)
parser.parse_args_orig = parser.parse_args
def my_parse_args():
res = parser.parse_args_orig()
res[0].ensure_value("ips", res[1])
return res[0]
parser.parse_args = my_parse_args
parser.add_argument = parser.add_option
fltnmap = db.nmap.flt_empty
fltpass = db.passive.flt_empty
_from = []
parser.add_argument(
"--category",
metavar="CATEGORY",
help="Choose a different category than the default",
)
parser.add_argument(
"--test",
"-t",
action="store_true",
help="Give results in standard output instead of inserting them in database.",
)
parser.add_argument(
"--verbose",
"-v",
action="store_true",
help="For test output, print out formated results.",
)
if not USING_ARGPARSE:
if "nmap" in sys.argv:
for args, kargs in db.nmap.argparser.args:
parser.add_option(*args, **kargs)
elif "passive" in sys.argv:
for args, kargs in db.passive.argparser.args:
parser.add_option(*args, **kargs)
else:
print("ivre db2view: error: invalid subcommand {nmap, passive}.")
exit(-1)
else:
subparsers = parser.add_subparsers(
dest="view_source", help="Accepted values are 'nmap' and 'passive'"
)
subparsers.add_parser("nmap", parents=[db.nmap.argparser])
passparser = subparsers.add_parser("passive", parents=[db.passive.argparser])
passparser.add_argument("ips", nargs="*")
args = parser.parse_args()
if args.category:
db.view.category = args.category
if not args.view_source:
args.view_source = "all"
if args.view_source == "all":
fltnmap = db.nmap.parse_args(args)
fltpass = db.passive.parse_args(args)
_from = [from_nmap(fltnmap), from_passive(fltpass)]
elif args.view_source == "nmap":
fltnmap = db.nmap.parse_args(args, fltnmap)
_from = [from_nmap(fltnmap)]
elif args.view_source == "passive":
fltpass = db.passive.parse_args(args, fltpass)
_from = [from_passive(fltpass)]
if args.test:
def output(x):
print(x)
else:
output = db.view.store_or_merge_host
# Filter by ip for passive
if args.view_source == "passive" and args.ips:
flt = db.passive.flt_empty
for a in args.ips:
if "-" in a:
a = a.split("-", 1)
if a[0].isdigit():
a[0] = int(a[0])
if a[1].isdigit():
a[1] = int(a[1])
flt = db.passive.flt_or(flt, db.passive.searchrange(a[0], a[1]))
elif "/" in a:
flt = db.passive.flt_or(flt, db.passive.searchnet(a))
else:
if a.isdigit():
a = db.passive.ip2internal(int(a))
flt = db.passive.flt_or(flt, db.passive.searchhost(a))
fltpass = db.passive.flt_and(fltpass, flt)
# Output results
itr = to_view(_from)
if not itr:
return
for elt in itr:
output(elt)
|
https://github.com/cea-sec/ivre/issues/569
|
$ ivre db2view
Traceback (most recent call last):
File "/usr/bin/ivre", line 84, in <module>
main()
File "/usr/bin/ivre", line 56, in main
tools.get_command(possible_commands[0])()
File "/usr/lib/python3.7/site-packages/ivre/tools/db2view.py", line 90, in main
fltnmap = db.nmap.parse_args(args)
File "/usr/lib/python3.7/site-packages/ivre/db/__init__.py", line 838, in parse_args
flt = super(DBActive, self).parse_args(args, flt=flt)
File "/usr/lib/python3.7/site-packages/ivre/db/__init__.py", line 107, in parse_args
if args.country is not None:
AttributeError: 'Namespace' object has no attribute 'country'
|
AttributeError
|
def _read_nmap_probes():
global _NMAP_CUR_PROBE, _NMAP_PROBES_POPULATED
_NMAP_CUR_PROBE = None
def parse_line(line):
global _NMAP_PROBES, _NMAP_CUR_PROBE
if line.startswith(b"match "):
line = line[6:]
soft = False
elif line.startswith(b"softmatch "):
line = line[10:]
soft = True
elif line.startswith(b"Probe "):
_NMAP_CUR_PROBE = []
proto, name, probe = line[6:].split(b" ", 2)
_NMAP_PROBES.setdefault(proto.lower().decode(), {})[name.decode()] = {
"probe": probe,
"fp": _NMAP_CUR_PROBE,
}
return
else:
return
service, data = line.split(b" ", 1)
info = {"soft": soft}
while data:
if data.startswith(b"cpe:"):
key = "cpe"
data = data[4:]
else:
key = data[0:1].decode()
data = data[1:]
sep = data[0:1]
data = data[1:]
index = data.index(sep)
value = data[:index]
data = data[index + 1 :]
flag = b""
if data:
if b" " in data:
flag, data = data.split(b" ", 1)
else:
flag, data = data, b""
if key == "m":
if value.endswith(b"\\r\\n"):
value = value[:-4] + b"(?:\\r\\n|$)"
elif value.endswith(b"\\\\n"):
value = value[:3] + b"(?:\\\\n|$)"
elif value.endswith(b"\\n"):
value = value[:-2] + b"(?:\\n|$)"
value = re.compile(
value,
flags=sum(
getattr(re, f) if hasattr(re, f) else 0
for f in flag.decode().upper()
),
)
flag = b""
else:
try:
value = value.decode("utf-8")
except UnicodeDecodeError:
value = repr(value)
info[key] = (value, flag)
_NMAP_CUR_PROBE.append((service.decode(), info))
try:
with open(
os.path.join(config.NMAP_SHARE_PATH, "nmap-service-probes"), "rb"
) as fdesc:
for line in fdesc:
parse_line(line[:-1])
except (AttributeError, TypeError, IOError):
LOGGER.warning("Cannot read Nmap service fingerprint file.", exc_info=True)
del _NMAP_CUR_PROBE
_NMAP_PROBES_POPULATED = True
|
def _read_nmap_probes():
global _NMAP_CUR_PROBE, _NMAP_PROBES_POPULATED
_NMAP_CUR_PROBE = None
def parse_line(line):
global _NMAP_PROBES, _NMAP_CUR_PROBE
if line.startswith(b"match "):
line = line[6:]
soft = False
elif line.startswith(b"softmatch "):
line = line[10:]
soft = True
elif line.startswith(b"Probe "):
_NMAP_CUR_PROBE = []
proto, name, probe = line[6:].split(b" ", 2)
_NMAP_PROBES.setdefault(proto.lower().decode(), {})[name.decode()] = {
"probe": probe,
"fp": _NMAP_CUR_PROBE,
}
return
else:
return
service, data = line.split(b" ", 1)
info = {"soft": soft}
while data:
if data.startswith(b"cpe:"):
key = "cpe"
data = data[4:]
else:
key = data[0:1].decode()
data = data[1:]
sep = data[0:1]
data = data[1:]
index = data.index(sep)
value = data[:index]
data = data[index + 1 :]
flag = b""
if data:
if b" " in data:
flag, data = data.split(b" ", 1)
else:
flag, data = data, b""
if key == "m":
if value.endswith(b"\\r\\n"):
value = value[:-4] + b"(?:\\r\\n|$)"
elif value.endswith(b"\\\\n"):
value = value[:3] + b"(?:\\\\n|$)"
elif value.endswith(b"\\n"):
value = value[:-2] + b"(?:\\n|$)"
value = re.compile(
value,
flags=sum(
getattr(re, f) if hasattr(re, f) else 0
for f in flag.decode().upper()
),
)
flag = b""
else:
value = value.decode()
info[key] = (value, flag)
_NMAP_CUR_PROBE.append((service.decode(), info))
try:
with open(
os.path.join(config.NMAP_SHARE_PATH, "nmap-service-probes"), "rb"
) as fdesc:
for line in fdesc:
parse_line(line[:-1])
except (AttributeError, TypeError, IOError):
LOGGER.warning("Cannot read Nmap service fingerprint file.", exc_info=True)
del _NMAP_CUR_PROBE
_NMAP_PROBES_POPULATED = True
|
https://github.com/cea-sec/ivre/issues/418
|
user@workstation:~/masscan/bin$ ivre nmap2db --archive --c MASSCAN-BANNER -s SCANNER BANNER.xml
WARNING:ivre:Exception (file 'BANNER.xml')
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/ivre/tools/scan2db.py", line 122, in main
merge=args.merge, masscan_probes=args.masscan_probes,
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 362, in store_scan
return store_scan_function(fname, filehash=scanid, **kargs)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 383, in store_scan_xml
parser.parse(utils.open_file(fname))
File "/usr/lib/python2.7/xml/sax/expatreader.py", line 107, in parse
xmlreader.IncrementalParser.parse(self, source)
File "/usr/lib/python2.7/xml/sax/xmlreader.py", line 123, in parse
self.feed(buffer)
File "/usr/lib/python2.7/xml/sax/expatreader.py", line 210, in feed
self._parser.Parse(data, isFinal)
File "/usr/lib/python2.7/xml/sax/expatreader.py", line 304, in start_element
self._cont_handler.startElement(name, AttributesImpl(attrs))
File "/usr/local/lib/python2.7/dist-packages/ivre/xmlnmap.py", line 1109, in startElement
probe=probe,
File "/usr/local/lib/python2.7/dist-packages/ivre/utils.py", line 843, in get_nmap_svc_fp
_read_nmap_probes()
File "/usr/local/lib/python2.7/dist-packages/ivre/utils.py", line 832, in _read_nmap_probes
parse_line(line[:-1])
File "/usr/local/lib/python2.7/dist-packages/ivre/utils.py", line 825, in parse_line
value = value.decode()
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 2: ordinal not in range(128)
0 results imported.
|
UnicodeDecodeError
|
def parse_line(line):
global _NMAP_PROBES, _NMAP_CUR_PROBE
if line.startswith(b"match "):
line = line[6:]
soft = False
elif line.startswith(b"softmatch "):
line = line[10:]
soft = True
elif line.startswith(b"Probe "):
_NMAP_CUR_PROBE = []
proto, name, probe = line[6:].split(b" ", 2)
_NMAP_PROBES.setdefault(proto.lower().decode(), {})[name.decode()] = {
"probe": probe,
"fp": _NMAP_CUR_PROBE,
}
return
else:
return
service, data = line.split(b" ", 1)
info = {"soft": soft}
while data:
if data.startswith(b"cpe:"):
key = "cpe"
data = data[4:]
else:
key = data[0:1].decode()
data = data[1:]
sep = data[0:1]
data = data[1:]
index = data.index(sep)
value = data[:index]
data = data[index + 1 :]
flag = b""
if data:
if b" " in data:
flag, data = data.split(b" ", 1)
else:
flag, data = data, b""
if key == "m":
if value.endswith(b"\\r\\n"):
value = value[:-4] + b"(?:\\r\\n|$)"
elif value.endswith(b"\\\\n"):
value = value[:3] + b"(?:\\\\n|$)"
elif value.endswith(b"\\n"):
value = value[:-2] + b"(?:\\n|$)"
value = re.compile(
value,
flags=sum(
getattr(re, f) if hasattr(re, f) else 0
for f in flag.decode().upper()
),
)
flag = b""
else:
try:
value = value.decode("utf-8")
except UnicodeDecodeError:
value = repr(value)
info[key] = (value, flag)
_NMAP_CUR_PROBE.append((service.decode(), info))
|
def parse_line(line):
global _NMAP_PROBES, _NMAP_CUR_PROBE
if line.startswith(b"match "):
line = line[6:]
soft = False
elif line.startswith(b"softmatch "):
line = line[10:]
soft = True
elif line.startswith(b"Probe "):
_NMAP_CUR_PROBE = []
proto, name, probe = line[6:].split(b" ", 2)
_NMAP_PROBES.setdefault(proto.lower().decode(), {})[name.decode()] = {
"probe": probe,
"fp": _NMAP_CUR_PROBE,
}
return
else:
return
service, data = line.split(b" ", 1)
info = {"soft": soft}
while data:
if data.startswith(b"cpe:"):
key = "cpe"
data = data[4:]
else:
key = data[0:1].decode()
data = data[1:]
sep = data[0:1]
data = data[1:]
index = data.index(sep)
value = data[:index]
data = data[index + 1 :]
flag = b""
if data:
if b" " in data:
flag, data = data.split(b" ", 1)
else:
flag, data = data, b""
if key == "m":
if value.endswith(b"\\r\\n"):
value = value[:-4] + b"(?:\\r\\n|$)"
elif value.endswith(b"\\\\n"):
value = value[:3] + b"(?:\\\\n|$)"
elif value.endswith(b"\\n"):
value = value[:-2] + b"(?:\\n|$)"
value = re.compile(
value,
flags=sum(
getattr(re, f) if hasattr(re, f) else 0
for f in flag.decode().upper()
),
)
flag = b""
else:
value = value.decode()
info[key] = (value, flag)
_NMAP_CUR_PROBE.append((service.decode(), info))
|
https://github.com/cea-sec/ivre/issues/418
|
user@workstation:~/masscan/bin$ ivre nmap2db --archive --c MASSCAN-BANNER -s SCANNER BANNER.xml
WARNING:ivre:Exception (file 'BANNER.xml')
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/ivre/tools/scan2db.py", line 122, in main
merge=args.merge, masscan_probes=args.masscan_probes,
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 362, in store_scan
return store_scan_function(fname, filehash=scanid, **kargs)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 383, in store_scan_xml
parser.parse(utils.open_file(fname))
File "/usr/lib/python2.7/xml/sax/expatreader.py", line 107, in parse
xmlreader.IncrementalParser.parse(self, source)
File "/usr/lib/python2.7/xml/sax/xmlreader.py", line 123, in parse
self.feed(buffer)
File "/usr/lib/python2.7/xml/sax/expatreader.py", line 210, in feed
self._parser.Parse(data, isFinal)
File "/usr/lib/python2.7/xml/sax/expatreader.py", line 304, in start_element
self._cont_handler.startElement(name, AttributesImpl(attrs))
File "/usr/local/lib/python2.7/dist-packages/ivre/xmlnmap.py", line 1109, in startElement
probe=probe,
File "/usr/local/lib/python2.7/dist-packages/ivre/utils.py", line 843, in get_nmap_svc_fp
_read_nmap_probes()
File "/usr/local/lib/python2.7/dist-packages/ivre/utils.py", line 832, in _read_nmap_probes
parse_line(line[:-1])
File "/usr/local/lib/python2.7/dist-packages/ivre/utils.py", line 825, in parse_line
value = value.decode()
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 2: ordinal not in range(128)
0 results imported.
|
UnicodeDecodeError
|
def masscan_post_http(self, script):
header = re.search(
re.escape(b"\nServer:") + b"[ \\\t]*([^\\\r\\\n]+)\\\r?(?:\\\n|$)",
script["masscan"]["raw"],
)
if header is None:
return
header = header.groups()[0]
self._curport.setdefault("scripts", []).append(
{
"id": "http-server-header",
"output": utils.nmap_encode_data(header),
"masscan": {
"raw": self._to_binary(header),
},
}
)
self._curport["service_product"] = utils.nmap_encode_data(header)
|
def masscan_post_http(self, script):
header = re.search(
re.escape("\nServer:") + "[ \\\t]*([^\\\r\\\n]+)\\\r?(?:\\\n|$)",
script["masscan"]["raw"],
)
if header is None:
return
header = header.groups()[0]
self._curport.setdefault("scripts", []).append(
{
"id": "http-server-header",
"output": utils.nmap_encode_data(header),
"masscan": {
"raw": self._to_binary(header),
},
}
)
self._curport["service_product"] = utils.nmap_encode_data(header)
|
https://github.com/cea-sec/ivre/issues/418
|
user@workstation:~/masscan/bin$ ivre nmap2db --archive --c MASSCAN-BANNER -s SCANNER BANNER.xml
WARNING:ivre:Exception (file 'BANNER.xml')
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/ivre/tools/scan2db.py", line 122, in main
merge=args.merge, masscan_probes=args.masscan_probes,
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 362, in store_scan
return store_scan_function(fname, filehash=scanid, **kargs)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 383, in store_scan_xml
parser.parse(utils.open_file(fname))
File "/usr/lib/python2.7/xml/sax/expatreader.py", line 107, in parse
xmlreader.IncrementalParser.parse(self, source)
File "/usr/lib/python2.7/xml/sax/xmlreader.py", line 123, in parse
self.feed(buffer)
File "/usr/lib/python2.7/xml/sax/expatreader.py", line 210, in feed
self._parser.Parse(data, isFinal)
File "/usr/lib/python2.7/xml/sax/expatreader.py", line 304, in start_element
self._cont_handler.startElement(name, AttributesImpl(attrs))
File "/usr/local/lib/python2.7/dist-packages/ivre/xmlnmap.py", line 1109, in startElement
probe=probe,
File "/usr/local/lib/python2.7/dist-packages/ivre/utils.py", line 843, in get_nmap_svc_fp
_read_nmap_probes()
File "/usr/local/lib/python2.7/dist-packages/ivre/utils.py", line 832, in _read_nmap_probes
parse_line(line[:-1])
File "/usr/local/lib/python2.7/dist-packages/ivre/utils.py", line 825, in parse_line
value = value.decode()
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 2: ordinal not in range(128)
0 results imported.
|
UnicodeDecodeError
|
def feed_all(self, masterid):
for scanid in self.get_scans():
try:
self.feed(masterid, scanid)
except LockError:
utils.LOGGER.error(
"Lock error - is another daemon process running?",
exc_info=True,
)
|
def feed_all(self, masterid):
for scanid in self.get_scans():
self.feed(masterid, scanid)
|
https://github.com/cea-sec/ivre/issues/403
|
root@ivreclient:~# ivre runscansagentdb --daemon
Traceback (most recent call last):
File "/usr/local/bin/ivre", line 78, in <module>
main()
File "/usr/local/bin/ivre", line 51, in main
tools.get_command(possible_commands[0])()
File "/usr/local/lib/python2.7/dist-packages/ivre/tools/runscansagentdb.py", line 231, in main
ivre.db.db.agent.feed_all(masterid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1351, in feed_all
self.feed(masterid, scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1354, in feed
scan = self.lock_scan(scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1486, in lock_scan
scan = self._lock_scan(scanid, None, lockid.bytes)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/mongo.py", line 3372, in _lock_scan
if "target_info" not in scan:
TypeError: argument of type 'NoneType' is not iterable
|
TypeError
|
def feed(self, masterid, scanid):
scan = self.lock_scan(scanid)
# TODO: handle "onhold" targets
target = self.get_scan_target(scanid)
try:
for agentid in scan["agents"]:
if self.get_agent(agentid)["master"] == masterid:
for _ in range(self.may_receive(agentid)):
self.add_target(agentid, scanid, next(target))
except StopIteration:
# This scan is over, let's free its agents
for agentid in scan["agents"]:
self.unassign_agent(agentid)
self.update_scan_target(scanid, target)
self.unlock_scan(scan)
|
def feed(self, masterid, scanid):
scan = self.lock_scan(scanid)
if scan is None:
raise StandardError("Could not acquire lock for scan %s" % scanid)
# TODO: handle "onhold" targets
target = self.get_scan_target(scanid)
try:
for agentid in scan["agents"]:
if self.get_agent(agentid)["master"] == masterid:
for _ in range(self.may_receive(agentid)):
self.add_target(agentid, scanid, next(target))
except StopIteration:
# This scan is over, let's free its agents
for agentid in scan["agents"]:
self.unassign_agent(agentid)
self.update_scan_target(scanid, target)
self.unlock_scan(scan)
|
https://github.com/cea-sec/ivre/issues/403
|
root@ivreclient:~# ivre runscansagentdb --daemon
Traceback (most recent call last):
File "/usr/local/bin/ivre", line 78, in <module>
main()
File "/usr/local/bin/ivre", line 51, in main
tools.get_command(possible_commands[0])()
File "/usr/local/lib/python2.7/dist-packages/ivre/tools/runscansagentdb.py", line 231, in main
ivre.db.db.agent.feed_all(masterid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1351, in feed_all
self.feed(masterid, scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1354, in feed
scan = self.lock_scan(scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1486, in lock_scan
scan = self._lock_scan(scanid, None, lockid.bytes)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/mongo.py", line 3372, in _lock_scan
if "target_info" not in scan:
TypeError: argument of type 'NoneType' is not iterable
|
TypeError
|
def lock_scan(self, scanid):
"""Acquire lock for scanid. Returns the new scan object on success,
and raises a LockError on failure.
"""
lockid = uuid.uuid1()
scan = self._lock_scan(scanid, None, lockid.bytes)
if scan["lock"] is not None:
# This might be a bug in uuid module, Python 2 only
## File "/opt/python/2.6.9/lib/python2.6/uuid.py", line 145, in __init__
## int = long(('%02x'*16) % tuple(map(ord, bytes)), 16)
# scan['lock'] = uuid.UUID(bytes=scan['lock'])
scan["lock"] = uuid.UUID(hex=utils.encode_hex(scan["lock"]).decode())
if scan["lock"] == lockid:
return scan
|
def lock_scan(self, scanid):
lockid = uuid.uuid1()
scan = self._lock_scan(scanid, None, lockid.bytes)
if scan["lock"] is not None:
# This might be a bug in uuid module, Python 2 only
## File "/opt/python/2.6.9/lib/python2.6/uuid.py", line 145, in __init__
## int = long(('%02x'*16) % tuple(map(ord, bytes)), 16)
# scan['lock'] = uuid.UUID(bytes=scan['lock'])
scan["lock"] = uuid.UUID(hex=utils.encode_hex(scan["lock"]).decode())
if scan["lock"] == lockid:
return scan
|
https://github.com/cea-sec/ivre/issues/403
|
root@ivreclient:~# ivre runscansagentdb --daemon
Traceback (most recent call last):
File "/usr/local/bin/ivre", line 78, in <module>
main()
File "/usr/local/bin/ivre", line 51, in main
tools.get_command(possible_commands[0])()
File "/usr/local/lib/python2.7/dist-packages/ivre/tools/runscansagentdb.py", line 231, in main
ivre.db.db.agent.feed_all(masterid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1351, in feed_all
self.feed(masterid, scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1354, in feed
scan = self.lock_scan(scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1486, in lock_scan
scan = self._lock_scan(scanid, None, lockid.bytes)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/mongo.py", line 3372, in _lock_scan
if "target_info" not in scan:
TypeError: argument of type 'NoneType' is not iterable
|
TypeError
|
def unlock_scan(self, scan):
"""Release lock for scanid. Returns True on success, and raises a
LockError on failure.
"""
if scan.get("lock") is None:
raise LockError("Cannot release lock for %r: scan is not locked" % scan["_id"])
scan = self._lock_scan(scan["_id"], scan["lock"].bytes, None)
return scan["lock"] is None
|
def unlock_scan(self, scan):
if scan.get("lock") is None:
raise ValueError("Scan is not locked")
scan = self._lock_scan(scan["_id"], scan["lock"].bytes, None)
return scan["lock"] is None
|
https://github.com/cea-sec/ivre/issues/403
|
root@ivreclient:~# ivre runscansagentdb --daemon
Traceback (most recent call last):
File "/usr/local/bin/ivre", line 78, in <module>
main()
File "/usr/local/bin/ivre", line 51, in main
tools.get_command(possible_commands[0])()
File "/usr/local/lib/python2.7/dist-packages/ivre/tools/runscansagentdb.py", line 231, in main
ivre.db.db.agent.feed_all(masterid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1351, in feed_all
self.feed(masterid, scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1354, in feed
scan = self.lock_scan(scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1486, in lock_scan
scan = self._lock_scan(scanid, None, lockid.bytes)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/mongo.py", line 3372, in _lock_scan
if "target_info" not in scan:
TypeError: argument of type 'NoneType' is not iterable
|
TypeError
|
def _lock_scan(self, scanid, oldlockid, newlockid):
"""Change lock for scanid from oldlockid to newlockid. Returns the new
scan object on success, and raises a LockError on failure.
"""
if oldlockid is not None:
oldlockid = bson.Binary(oldlockid)
if newlockid is not None:
newlockid = bson.Binary(newlockid)
scan = self.db[self.colname_scans].find_and_modify(
{
"_id": scanid,
"lock": oldlockid,
},
{
"$set": {"lock": newlockid, "pid": os.getpid()},
},
full_response=True,
fields={"target": False},
new=True,
)["value"]
if scan is None:
if oldlockid is None:
raise LockError("Cannot acquire lock for %r" % scanid)
if newlockid is None:
raise LockError("Cannot release lock for %r" % scanid)
raise LockError(
"Cannot change lock for %r from %r to %r" % (scanid, oldlockid, newlockid)
)
if "target_info" not in scan:
target = self.get_scan_target(scanid)
if target is not None:
target_info = target.target.infos
self.db[self.colname_scans].update(
{"_id": scanid},
{"$set": {"target_info": target_info}},
)
scan["target_info"] = target_info
if scan["lock"] is not None:
scan["lock"] = bytes(scan["lock"])
return scan
|
def _lock_scan(self, scanid, oldlockid, newlockid):
if oldlockid is not None:
oldlockid = bson.Binary(oldlockid)
if newlockid is not None:
newlockid = bson.Binary(newlockid)
scan = self.db[self.colname_scans].find_and_modify(
{
"_id": scanid,
"lock": oldlockid,
},
{
"$set": {"lock": newlockid},
},
full_response=True,
fields={"target": False},
new=True,
)["value"]
if "target_info" not in scan:
target = self.get_scan_target(scanid)
if target is not None:
target_info = target.target.infos
self.db[self.colname_scans].update(
{"_id": scanid},
{"$set": {"target_info": target_info}},
)
scan["target_info"] = target_info
if scan is not None and scan["lock"] is not None:
scan["lock"] = bytes(scan["lock"])
return scan
|
https://github.com/cea-sec/ivre/issues/403
|
root@ivreclient:~# ivre runscansagentdb --daemon
Traceback (most recent call last):
File "/usr/local/bin/ivre", line 78, in <module>
main()
File "/usr/local/bin/ivre", line 51, in main
tools.get_command(possible_commands[0])()
File "/usr/local/lib/python2.7/dist-packages/ivre/tools/runscansagentdb.py", line 231, in main
ivre.db.db.agent.feed_all(masterid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1351, in feed_all
self.feed(masterid, scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1354, in feed
scan = self.lock_scan(scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1486, in lock_scan
scan = self._lock_scan(scanid, None, lockid.bytes)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/mongo.py", line 3372, in _lock_scan
if "target_info" not in scan:
TypeError: argument of type 'NoneType' is not iterable
|
TypeError
|
def display_scan(scan, verbose=True):
scan["target"] = ivre.db.db.agent.get_scan_target(scan["_id"])
print("scan:")
if verbose:
print(" - id: %s" % scan["_id"])
print(" - categories:")
for category in scan["target"].target.infos["categories"]:
print(" - %s" % category)
print(" - targets added: %d" % scan["target"].nextcount)
print(" - results fetched: %d" % scan["results"])
print(" - total targets to add: %d" % scan["target"].target.maxnbr)
print(" - available targets: %d" % scan["target"].target.targetscount)
if scan["target"].nextcount == scan["target"].target.maxnbr:
print(" - all targets have been added")
if scan["results"] == scan["target"].target.maxnbr:
print(" - all results have been retrieved")
if verbose:
print(" - internal state: %r" % (scan["target"].getstate(),))
if scan.get("lock") is not None:
print(" - locked", end="")
if scan.get("pid") is not None:
print(" (by %d)" % scan["pid"])
else:
print()
print(" - agents:")
for agent in scan["agents"]:
print(" - %s" % agent)
|
def display_scan(scan, verbose=True):
scan["target"] = ivre.db.db.agent.get_scan_target(scan["_id"])
print("scan:")
if verbose:
print(" - id: %s" % scan["_id"])
print(" - categories:")
for category in scan["target"].target.infos["categories"]:
print(" - %s" % category)
print(" - targets added: %d" % scan["target"].nextcount)
print(" - results fetched: %d" % scan["results"])
print(" - total targets to add: %d" % scan["target"].target.maxnbr)
print(" - available targets: %d" % scan["target"].target.targetscount)
if scan["target"].nextcount == scan["target"].target.maxnbr:
print(" - all targets have been added")
if scan["results"] == scan["target"].target.maxnbr:
print(" - all results have been retrieved")
if verbose:
print(" - internal state: %r" % (scan["target"].getstate(),))
print(" - agents:")
for agent in scan["agents"]:
print(" - %s" % agent)
|
https://github.com/cea-sec/ivre/issues/403
|
root@ivreclient:~# ivre runscansagentdb --daemon
Traceback (most recent call last):
File "/usr/local/bin/ivre", line 78, in <module>
main()
File "/usr/local/bin/ivre", line 51, in main
tools.get_command(possible_commands[0])()
File "/usr/local/lib/python2.7/dist-packages/ivre/tools/runscansagentdb.py", line 231, in main
ivre.db.db.agent.feed_all(masterid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1351, in feed_all
self.feed(masterid, scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1354, in feed
scan = self.lock_scan(scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1486, in lock_scan
scan = self._lock_scan(scanid, None, lockid.bytes)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/mongo.py", line 3372, in _lock_scan
if "target_info" not in scan:
TypeError: argument of type 'NoneType' is not iterable
|
TypeError
|
def main():
try:
import argparse
parser = argparse.ArgumentParser(
description=__doc__, parents=[ivre.target.argparser]
)
except ImportError:
import optparse
parser = optparse.OptionParser(description=__doc__)
for args, kargs in ivre.target.argparser.args:
parser.add_option(*args, **kargs)
parser.parse_args_orig = parser.parse_args
parser.parse_args = lambda: parser.parse_args_orig()[0]
parser.add_argument = parser.add_option
parser.add_argument(
"--assign-free-agents",
action="store_true",
help="Assign any agent available (only useful when specifying a target).",
)
parser.add_argument(
"--max-waiting",
metavar="COUNT",
type=int,
default=60,
help="Maximum targets waiting (only affects --add-agent)",
)
parser.add_argument(
"--source", metavar="NAME", help="Source name (only affects --add-agent)"
)
parser.add_argument("--add-agent", metavar="(HOST:)PATH", nargs="+")
parser.add_argument("--del-agent", metavar="AGENT", nargs="+")
parser.add_argument("--add-local-master", action="store_true")
parser.add_argument(
"--master-path",
metavar="PATH",
default=ivre.config.AGENT_MASTER_PATH,
help="Non-default path to use for master "
"(default is specified by the configuration "
"attribute `AGENT_MASTER_PATH`)",
)
parser.add_argument("--list-agents", action="store_true")
parser.add_argument("--list-scans", action="store_true")
parser.add_argument("--list-masters", action="store_true")
parser.add_argument("--assign", metavar="AGENT:SCAN")
parser.add_argument("--unassign", metavar="AGENT")
parser.add_argument("--force-unlock", action="store_true")
parser.add_argument(
"--init",
action="store_true",
help="Purge or create and initialize the database.",
)
parser.add_argument(
"--sleep",
type=int,
default=2,
help="Time to wait between each feed/sync cycle (only usefull with --daemon).",
)
parser.add_argument(
"--daemon",
action="store_true",
help="""Run continuously
feed/sync cycles. The "sync" part requires to be able to rsync
to & from the agents non-interactively (without entering a
password). Please note this will *not* daemonize the
process.
""",
)
args = parser.parse_args()
if args.init:
if os.isatty(sys.stdin.fileno()):
sys.stdout.write(
"This will remove any agent and/or scan in your "
"database and files. Process? [y/N] "
)
ans = input()
if ans.lower() != "y":
sys.exit(-1)
ivre.db.db.agent.init()
ivre.utils.cleandir(args.master_path)
for dirname in ["output", "onhold"]:
ivre.utils.makedirs(os.path.join(args.master_path, dirname))
if args.add_local_master:
ivre.utils.makedirs(args.master_path)
ivre.db.db.agent.add_local_master(args.master_path)
if args.add_agent is not None:
masterid = ivre.db.db.agent.masterid_from_dir(args.master_path)
for agent in args.add_agent:
ivre.db.db.agent.add_agent_from_string(
masterid,
agent,
maxwaiting=args.max_waiting,
source=args.source,
)
if args.del_agent is not None:
for agentid in args.del_agent:
ivre.db.db.agent.del_agent(ivre.db.db.agent.str2id(agentid))
if args.assign is not None:
try:
agentid, scanid = (
ivre.db.db.agent.str2id(elt) for elt in args.assign.split(":", 1)
)
except ValueError:
parser.error("argument --assign: must give agentid:scanid")
ivre.db.db.agent.assign_agent(agentid, scanid)
if args.unassign is not None:
ivre.db.db.agent.unassign_agent(ivre.db.db.agent.str2id(args.unassign))
targets = ivre.target.target_from_args(args)
if targets is not None:
ivre.db.db.agent.add_scan(
targets, assign_to_free_agents=bool(args.assign_free_agents)
)
if args.force_unlock:
if os.isatty(sys.stdin.fileno()):
sys.stdout.write(
'Only use this when a "ivre runscansagentdb --daemon" process '
'has crashed. Make sure no "ivre runscansagentdb" process is '
"running or your scan data will be inconsistent. Process? "
"[y/N] "
)
ans = input()
if ans.lower() != "y":
sys.exit(-1)
for scanid in ivre.db.db.agent.get_scans():
scan = ivre.db.db.agent.get_scan(scanid)
if scan.get("lock") is not None:
ivre.db.db.agent.unlock_scan(scan)
if args.list_agents:
for agentid in ivre.db.db.agent.get_agents():
display_agent(ivre.db.db.agent.get_agent(agentid))
if args.list_scans:
for scanid in ivre.db.db.agent.get_scans():
display_scan(ivre.db.db.agent.get_scan(scanid))
if args.list_masters:
for masterid in ivre.db.db.agent.get_masters():
display_master(ivre.db.db.agent.get_master(masterid))
if args.daemon:
def terminate(signum, _):
global WANT_DOWN
ivre.utils.LOGGER.info(
"shutdown: got signal %d, will halt after current task.",
signum,
)
WANT_DOWN = True
def terminate_now(signum, _):
ivre.utils.LOGGER.info("shutdown: got signal %d, halting now.", signum)
exit()
signal.signal(signal.SIGINT, terminate)
signal.signal(signal.SIGTERM, terminate)
masterid = ivre.db.db.agent.masterid_from_dir(args.master_path)
while not WANT_DOWN:
ivre.db.db.agent.feed_all(masterid)
ivre.db.db.agent.sync_all(masterid)
signal.signal(signal.SIGINT, terminate_now)
signal.signal(signal.SIGTERM, terminate_now)
if not WANT_DOWN:
time.sleep(args.sleep)
signal.signal(signal.SIGINT, terminate)
signal.signal(signal.SIGTERM, terminate)
|
def main():
try:
import argparse
parser = argparse.ArgumentParser(
description=__doc__, parents=[ivre.target.argparser]
)
except ImportError:
import optparse
parser = optparse.OptionParser(description=__doc__)
for args, kargs in ivre.target.argparser.args:
parser.add_option(*args, **kargs)
parser.parse_args_orig = parser.parse_args
parser.parse_args = lambda: parser.parse_args_orig()[0]
parser.add_argument = parser.add_option
parser.add_argument(
"--assign-free-agents",
action="store_true",
help="Assign any agent available (only useful when specifying a target).",
)
parser.add_argument(
"--max-waiting",
metavar="COUNT",
type=int,
default=60,
help="Maximum targets waiting (only affects --add-agent)",
)
parser.add_argument(
"--source", metavar="NAME", help="Source name (only affects --add-agent)"
)
parser.add_argument("--add-agent", metavar="(HOST:)PATH", nargs="+")
parser.add_argument("--del-agent", metavar="AGENT", nargs="+")
parser.add_argument("--add-local-master", action="store_true")
parser.add_argument(
"--master-path",
metavar="PATH",
default=ivre.config.AGENT_MASTER_PATH,
help="Non-default path to use for master "
"(default is specified by the configuration "
"attribute `AGENT_MASTER_PATH`)",
)
parser.add_argument("--list-agents", action="store_true")
parser.add_argument("--list-scans", action="store_true")
parser.add_argument("--list-masters", action="store_true")
parser.add_argument("--assign", metavar="AGENT:SCAN")
parser.add_argument("--unassign", metavar="AGENT")
parser.add_argument(
"--init",
action="store_true",
help="Purge or create and initialize the database.",
)
parser.add_argument(
"--sleep",
type=int,
default=2,
help="Time to wait between each feed/sync cycle (only usefull with --daemon).",
)
parser.add_argument(
"--daemon",
action="store_true",
help="""Run continuously
feed/sync cycles. The "sync" part requires to be able to rsync
to & from the agents non-interactively (without entering a
password). Please note this will *not* daemonize the
process.
""",
)
args = parser.parse_args()
if args.init:
if os.isatty(sys.stdin.fileno()):
sys.stdout.write(
"This will remove any agent and/or scan in your "
"database and files. Process ? [y/N] "
)
ans = input()
if ans.lower() != "y":
sys.exit(-1)
ivre.db.db.agent.init()
ivre.utils.cleandir(args.master_path)
for dirname in ["output", "onhold"]:
ivre.utils.makedirs(os.path.join(args.master_path, dirname))
if args.add_local_master:
ivre.utils.makedirs(args.master_path)
ivre.db.db.agent.add_local_master(args.master_path)
if args.add_agent is not None:
masterid = ivre.db.db.agent.masterid_from_dir(args.master_path)
for agent in args.add_agent:
ivre.db.db.agent.add_agent_from_string(
masterid,
agent,
maxwaiting=args.max_waiting,
source=args.source,
)
if args.del_agent is not None:
for agentid in args.del_agent:
ivre.db.db.agent.del_agent(ivre.db.db.agent.str2id(agentid))
if args.assign is not None:
try:
agentid, scanid = (
ivre.db.db.agent.str2id(elt) for elt in args.assign.split(":", 1)
)
except ValueError:
parser.error("argument --assign: must give agentid:scanid")
ivre.db.db.agent.assign_agent(agentid, scanid)
if args.unassign is not None:
ivre.db.db.agent.unassign_agent(ivre.db.db.agent.str2id(args.unassign))
targets = ivre.target.target_from_args(args)
if targets is not None:
ivre.db.db.agent.add_scan(
targets, assign_to_free_agents=bool(args.assign_free_agents)
)
if args.list_agents:
for agent in (
ivre.db.db.agent.get_agent(agentid)
for agentid in ivre.db.db.agent.get_agents()
):
display_agent(agent)
if args.list_scans:
for scan in (
ivre.db.db.agent.get_scan(scanid) for scanid in ivre.db.db.agent.get_scans()
):
display_scan(scan)
if args.list_masters:
for master in (
ivre.db.db.agent.get_master(masterid)
for masterid in ivre.db.db.agent.get_masters()
):
display_master(master)
if args.daemon:
def terminate(signum, _):
global WANT_DOWN
print("SHUTDOWN: got signal %d, will halt after current task." % signum)
WANT_DOWN = True
def terminate_now(signum, _):
print("SHUTDOWN: got signal %d, halting now." % signum)
exit()
signal.signal(signal.SIGINT, terminate)
signal.signal(signal.SIGTERM, terminate)
masterid = ivre.db.db.agent.masterid_from_dir(args.master_path)
while not WANT_DOWN:
ivre.db.db.agent.feed_all(masterid)
ivre.db.db.agent.sync_all(masterid)
signal.signal(signal.SIGINT, terminate_now)
signal.signal(signal.SIGTERM, terminate_now)
if not WANT_DOWN:
time.sleep(args.sleep)
signal.signal(signal.SIGINT, terminate)
signal.signal(signal.SIGTERM, terminate)
|
https://github.com/cea-sec/ivre/issues/403
|
root@ivreclient:~# ivre runscansagentdb --daemon
Traceback (most recent call last):
File "/usr/local/bin/ivre", line 78, in <module>
main()
File "/usr/local/bin/ivre", line 51, in main
tools.get_command(possible_commands[0])()
File "/usr/local/lib/python2.7/dist-packages/ivre/tools/runscansagentdb.py", line 231, in main
ivre.db.db.agent.feed_all(masterid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1351, in feed_all
self.feed(masterid, scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1354, in feed
scan = self.lock_scan(scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1486, in lock_scan
scan = self._lock_scan(scanid, None, lockid.bytes)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/mongo.py", line 3372, in _lock_scan
if "target_info" not in scan:
TypeError: argument of type 'NoneType' is not iterable
|
TypeError
|
def terminate(signum, _):
global WANT_DOWN
ivre.utils.LOGGER.info(
"shutdown: got signal %d, will halt after current task.",
signum,
)
WANT_DOWN = True
|
def terminate(signum, _):
global WANT_DOWN
print("SHUTDOWN: got signal %d, will halt after current task." % signum)
WANT_DOWN = True
|
https://github.com/cea-sec/ivre/issues/403
|
root@ivreclient:~# ivre runscansagentdb --daemon
Traceback (most recent call last):
File "/usr/local/bin/ivre", line 78, in <module>
main()
File "/usr/local/bin/ivre", line 51, in main
tools.get_command(possible_commands[0])()
File "/usr/local/lib/python2.7/dist-packages/ivre/tools/runscansagentdb.py", line 231, in main
ivre.db.db.agent.feed_all(masterid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1351, in feed_all
self.feed(masterid, scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1354, in feed
scan = self.lock_scan(scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1486, in lock_scan
scan = self._lock_scan(scanid, None, lockid.bytes)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/mongo.py", line 3372, in _lock_scan
if "target_info" not in scan:
TypeError: argument of type 'NoneType' is not iterable
|
TypeError
|
def terminate_now(signum, _):
ivre.utils.LOGGER.info("shutdown: got signal %d, halting now.", signum)
exit()
|
def terminate_now(signum, _):
print("SHUTDOWN: got signal %d, halting now." % signum)
exit()
|
https://github.com/cea-sec/ivre/issues/403
|
root@ivreclient:~# ivre runscansagentdb --daemon
Traceback (most recent call last):
File "/usr/local/bin/ivre", line 78, in <module>
main()
File "/usr/local/bin/ivre", line 51, in main
tools.get_command(possible_commands[0])()
File "/usr/local/lib/python2.7/dist-packages/ivre/tools/runscansagentdb.py", line 231, in main
ivre.db.db.agent.feed_all(masterid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1351, in feed_all
self.feed(masterid, scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1354, in feed
scan = self.lock_scan(scanid)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 1486, in lock_scan
scan = self._lock_scan(scanid, None, lockid.bytes)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/mongo.py", line 3372, in _lock_scan
if "target_info" not in scan:
TypeError: argument of type 'NoneType' is not iterable
|
TypeError
|
def endElement(self, name):
if name == "nmaprun":
self._curscan = None
elif name == "host":
# masscan -oX output has no "state" tag
if (
self._curhost.get("state", "up") == "up"
and (not self._needports or "ports" in self._curhost)
and (
not self._needopenports
or self._curhost.get("openports", {}).get("count")
)
):
if "openports" not in self._curhost:
self._curhost["openports"] = {"count": 0}
self._pre_addhost()
self._addhost()
self._curhost = None
elif name == "hostnames":
self._curhost["hostnames"] = self._curhostnames
self._curhostnames = None
elif name == "extraports":
self._curhost.setdefault("extraports", {}).update(self._curextraports)
self._curextraports = None
elif name == "port":
self._curhost.setdefault("ports", []).append(self._curport)
if self._curport.get("state_state") == "open":
openports = self._curhost.setdefault("openports", {})
openports["count"] = openports.get("count", 0) + 1
protoopenports = openports.setdefault(self._curport["protocol"], {})
protoopenports["count"] = protoopenports.get("count", 0) + 1
protoopenports.setdefault("ports", []).append(self._curport["port"])
self._curport = None
elif name == "script":
if self._curport is not None:
current = self._curport
elif self._curhost is not None:
current = self._curhost
else:
# We do not want to handle script tags outside host or
# port tags (usually scripts running on prerule /
# postrule)
self._curscript = None
if self._curtablepath:
utils.LOGGER.warning(
"self._curtablepath should be empty, got [%r]", self._curtablepath
)
self._curtable = {}
return
if self._curscript["id"] in SCREENSHOTS_SCRIPTS:
fname = SCREENSHOTS_SCRIPTS[self._curscript["id"]](self._curscript)
if fname is not None:
exceptions = []
for full_fname in [
fname,
os.path.join(os.path.dirname(self._fname), fname),
]:
try:
with open(full_fname) as fdesc:
data = fdesc.read()
trim_result = utils.trim_image(data)
if trim_result:
# When trim_result is False, the image no
# longer exists after trim
if trim_result is not True:
# Image has been trimmed
data = trim_result
current["screenshot"] = "field"
current["screendata"] = self._to_binary(data)
screenwords = utils.screenwords(data)
if screenwords is not None:
current["screenwords"] = screenwords
except Exception:
exceptions.append((sys.exc_info(), full_fname))
else:
exceptions = []
break
for exc_info, full_fname in exceptions:
utils.LOGGER.warning(
"Screenshot: exception (scanfile %r, file %r)",
self._fname,
full_fname,
exc_info=exc_info,
)
if ignore_script(self._curscript):
if self._curtablepath:
utils.LOGGER.warning(
"self._curtablepath should be empty, got [%r]", self._curtablepath
)
self._curtable = {}
self._curscript = None
return
infokey = self._curscript.get("id", None)
infokey = ALIASES_TABLE_ELEMS.get(infokey, infokey)
if self._curtable:
if self._curtablepath:
utils.LOGGER.warning(
"self._curtablepath should be empty, got [%r]", self._curtablepath
)
if infokey in CHANGE_TABLE_ELEMS:
self._curtable = CHANGE_TABLE_ELEMS[infokey](self._curtable)
self._curscript[infokey] = self._curtable
self._curtable = {}
elif infokey in ADD_TABLE_ELEMS:
infos = ADD_TABLE_ELEMS[infokey]
if isinstance(infos, utils.REGEXP_T):
infos = infos.search(self._curscript.get("output", ""))
if infos is not None:
infosdict = infos.groupdict()
if infosdict:
self._curscript[infokey] = infosdict
else:
infos = list(infos.groups())
if infos:
self._curscript[infokey] = infos
elif hasattr(infos, "__call__"):
infos = infos(self._curscript)
if infos is not None:
self._curscript[infokey] = infos
current.setdefault("scripts", []).append(self._curscript)
self._curscript = None
elif name in ["table", "elem"]:
if self._curscript.get("id") in IGNORE_TABLE_ELEMS:
return
if name == "elem":
lastlevel = self._curtable
for k in self._curtablepath[:-1]:
if k is None:
lastlevel = lastlevel[-1]
else:
lastlevel = lastlevel[k]
k = self._curtablepath[-1]
if isinstance(k, int):
lastlevel.append(self._curdata)
else:
lastlevel[k] = self._curdata
if k == "cpe":
self._add_cpe_to_host()
# stop recording characters
self._curdata = None
self._curtablepath.pop()
elif name == "hostscript" and "scripts" in self._curhost:
# "fake" port element, without a "protocol" key and with the
# magic value -1 for the "port" key.
self._curhost.setdefault("ports", []).append(
{"port": -1, "scripts": self._curhost.pop("scripts")}
)
elif name == "trace":
self._curhost.setdefault("traces", []).append(self._curtrace)
self._curtrace = None
elif name == "cpe":
self._add_cpe_to_host()
|
def endElement(self, name):
if name == "nmaprun":
self._curscan = None
elif name == "host":
# masscan -oX output has no "state" tag
if (
self._curhost.get("state", "up") == "up"
and (not self._needports or "ports" in self._curhost)
and (
not self._needopenports
or self._curhost.get("openports", {}).get("count")
)
):
if "openports" not in self._curhost:
self._curhost["openports"] = {"count": 0}
self._pre_addhost()
self._addhost()
self._curhost = None
elif name == "hostnames":
self._curhost["hostnames"] = self._curhostnames
self._curhostnames = None
elif name == "extraports":
self._curhost.setdefault("extraports", {}).update(self._curextraports)
self._curextraports = None
elif name == "port":
self._curhost.setdefault("ports", []).append(self._curport)
if self._curport.get("state_state") == "open":
openports = self._curhost.setdefault("openports", {})
openports["count"] = openports.get("count", 0) + 1
protoopenports = openports.setdefault(self._curport["protocol"], {})
protoopenports["count"] = protoopenports.get("count", 0) + 1
protoopenports.setdefault("ports", []).append(self._curport["port"])
self._curport = None
elif name == "script":
if self._curport is not None:
current = self._curport
elif self._curhost is not None:
current = self._curhost
else:
# We do not want to handle script tags outside host or
# port tags (usually scripts running on prerule /
# postrule)
self._curscript = None
if self._curtablepath:
utils.LOGGER.warning(
"self._curtablepath should be empty, got [%r]", self._curtablepath
)
self._curtable = {}
return
if self._curscript["id"] in SCREENSHOTS_SCRIPTS:
fname = SCREENSHOTS_SCRIPTS[self._curscript["id"]](self._curscript)
if fname is not None:
exceptions = []
for full_fname in [
fname,
os.path.join(os.path.dirname(self._fname), fname),
]:
try:
with open(full_fname) as fdesc:
data = fdesc.read()
trim_result = utils.trim_image(data)
if trim_result:
# When trim_result is False, the image no
# longer exists after trim
if trim_result is not True:
# Image has been trimmed
data = trim_result
current["screenshot"] = "field"
current["screendata"] = self._to_binary(data)
screenwords = utils.screenwords(data)
if screenwords is not None:
current["screenwords"] = screenwords
except Exception:
exceptions.append((sys.exc_info(), full_fname))
else:
exceptions = []
break
for exc_info, full_fname in exceptions:
utils.LOGGER.warning(
"Screenshot: exception (scanfile %r, file %r)",
self._fname,
full_fname,
exc_info=exc_info,
)
if ignore_script(self._curscript):
self._curscript = None
return
infokey = self._curscript.get("id", None)
infokey = ALIASES_TABLE_ELEMS.get(infokey, infokey)
if self._curtable:
if self._curtablepath:
utils.LOGGER.warning(
"self._curtablepath should be empty, got [%r]", self._curtablepath
)
if infokey in CHANGE_TABLE_ELEMS:
self._curtable = CHANGE_TABLE_ELEMS[infokey](self._curtable)
self._curscript[infokey] = self._curtable
self._curtable = {}
elif infokey in ADD_TABLE_ELEMS:
infos = ADD_TABLE_ELEMS[infokey]
if isinstance(infos, utils.REGEXP_T):
infos = infos.search(self._curscript.get("output", ""))
if infos is not None:
infosdict = infos.groupdict()
if infosdict:
self._curscript[infokey] = infosdict
else:
infos = list(infos.groups())
if infos:
self._curscript[infokey] = infos
elif hasattr(infos, "__call__"):
infos = infos(self._curscript)
if infos is not None:
self._curscript[infokey] = infos
current.setdefault("scripts", []).append(self._curscript)
self._curscript = None
elif name in ["table", "elem"]:
if self._curscript.get("id") in IGNORE_TABLE_ELEMS:
return
if name == "elem":
lastlevel = self._curtable
for k in self._curtablepath[:-1]:
if k is None:
lastlevel = lastlevel[-1]
else:
lastlevel = lastlevel[k]
k = self._curtablepath[-1]
if isinstance(k, int):
lastlevel.append(self._curdata)
else:
lastlevel[k] = self._curdata
if k == "cpe":
self._add_cpe_to_host()
# stop recording characters
self._curdata = None
self._curtablepath.pop()
elif name == "hostscript" and "scripts" in self._curhost:
# "fake" port element, without a "protocol" key and with the
# magic value -1 for the "port" key.
self._curhost.setdefault("ports", []).append(
{"port": -1, "scripts": self._curhost.pop("scripts")}
)
elif name == "trace":
self._curhost.setdefault("traces", []).append(self._curtrace)
self._curtrace = None
elif name == "cpe":
self._add_cpe_to_host()
|
https://github.com/cea-sec/ivre/issues/406
|
WARNING:ivre:Exception (file './scans/NET-10.10.10.10_32/up/10/10/10/10.xml')
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/ivre/tools/scan2db.py", line 122, in main
merge=args.merge, masscan_probes=args.masscan_probes,
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 362, in store_scan
return store_scan_function(fname, filehash=scanid, **kargs)
File "/usr/local/lib/python2.7/dist-packages/ivre/db/__init__.py", line 383, in store_scan_xml
parser.parse(utils.open_file(fname))
File "/usr/lib/python2.7/xml/sax/expatreader.py", line 110, in parse
xmlreader.IncrementalParser.parse(self, source)
File "/usr/lib/python2.7/xml/sax/xmlreader.py", line 123, in parse
self.feed(buffer)
File "/usr/lib/python2.7/xml/sax/expatreader.py", line 213, in feed
self._parser.Parse(data, isFinal)
File "/usr/lib/python2.7/xml/sax/expatreader.py", line 317, in start_element
self._cont_handler.startElement(name, AttributesImpl(attrs))
File "/usr/local/lib/python2.7/dist-packages/ivre/xmlnmap.py", line 1158, in startElement
lastlevel.append(obj)
AttributeError: 'dict' object has no attribute 'append'
0 results imported.
|
AttributeError
|
def run(self):
self._dirs = self.get_dirs()
for dirname in self._dirs:
if os.path.isdir(dirname):
self._watcher.add_watch(dirname, mask=self.event_mask)
for event in self._watcher.event_gen():
if event is None:
continue
filename = event[3]
self._callback(filename)
|
def run(self):
self._dirs = self.get_dirs()
for dirname in self._dirs:
self._watcher.add_watch(dirname, mask=self.event_mask)
for event in self._watcher.event_gen():
if event is None:
continue
filename = event[3]
self._callback(filename)
|
https://github.com/benoitc/gunicorn/issues/2244
|
[2020-01-21 21:01:06 +0100] [19910] [INFO] Starting gunicorn 20.0.4
[2020-01-21 21:01:06 +0100] [19910] [INFO] Listening at: http://127.0.0.1:8000 (19910)
[2020-01-21 21:01:06 +0100] [19910] [INFO] Using worker: sync
[2020-01-21 21:01:06 +0100] [19913] [INFO] Booting worker with pid: 19913
[2020-01-21 21:01:10 +0100] [19913] [INFO] Worker reloading: /home/liquid/tmp/gunicorn/app.py modified
[2020-01-21 21:01:10 +0100] [19913] [INFO] Worker exiting (pid: 19913)
[2020-01-21 21:01:10 +0100] [19931] [INFO] Booting worker with pid: 19931
[2020-01-21 21:01:10 +0100] [19931] [ERROR] invalid syntax (app.py, line 4)
Traceback (most recent call last):
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/workers/base.py", line 144, in load_wsgi
self.wsgi = self.app.wsgi()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/app/base.py", line 67, in wsgi
self.callable = self.load()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/app/wsgiapp.py", line 49, in load
return self.load_wsgiapp()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/app/wsgiapp.py", line 39, in load_wsgiapp
return util.import_app(self.app_uri)
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/util.py", line 358, in import_app
mod = importlib.import_module(module)
File "/usr/lib/python3.7/importlib/__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1006, in _gcd_import
File "<frozen importlib._bootstrap>", line 983, in _find_and_load
File "<frozen importlib._bootstrap>", line 967, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 677, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 724, in exec_module
File "<frozen importlib._bootstrap_external>", line 860, in get_code
File "<frozen importlib._bootstrap_external>", line 791, in source_to_code
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/home/liquid/tmp/gunicorn/app.py", line 4
def index():
^
SyntaxError: invalid syntax
[2020-01-21 21:01:10 +0100] [19931] [ERROR] Exception in worker process
Traceback (most recent call last):
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/workers/base.py", line 144, in load_wsgi
self.wsgi = self.app.wsgi()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/app/base.py", line 67, in wsgi
self.callable = self.load()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/app/wsgiapp.py", line 49, in load
return self.load_wsgiapp()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/app/wsgiapp.py", line 39, in load_wsgiapp
return util.import_app(self.app_uri)
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/util.py", line 358, in import_app
mod = importlib.import_module(module)
File "/usr/lib/python3.7/importlib/__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1006, in _gcd_import
File "<frozen importlib._bootstrap>", line 983, in _find_and_load
File "<frozen importlib._bootstrap>", line 967, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 677, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 724, in exec_module
File "<frozen importlib._bootstrap_external>", line 860, in get_code
File "<frozen importlib._bootstrap_external>", line 791, in source_to_code
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/home/liquid/tmp/gunicorn/app.py", line 4
def index():
^
SyntaxError: invalid syntax
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/arbiter.py", line 583, in spawn_worker
worker.init_process()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/workers/base.py", line 119, in init_process
self.load_wsgi()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/workers/base.py", line 157, in load_wsgi
self.reloader.add_extra_file(exc_val.filename)
AttributeError: 'NoneType' object has no attribute 'add_extra_file'
[2020-01-21 21:01:10 +0100] [19931] [INFO] Worker exiting (pid: 19931)
[2020-01-21 21:01:10 +0100] [19910] [INFO] Shutting down: Master
[2020-01-21 21:01:10 +0100] [19910] [INFO] Reason: Worker failed to boot.
|
SyntaxError
|
def init_process(self):
"""\
If you override this method in a subclass, the last statement
in the function should be to call this method with
super().init_process() so that the ``run()`` loop is initiated.
"""
# set environment' variables
if self.cfg.env:
for k, v in self.cfg.env.items():
os.environ[k] = v
util.set_owner_process(self.cfg.uid, self.cfg.gid, initgroups=self.cfg.initgroups)
# Reseed the random number generator
util.seed()
# For waking ourselves up
self.PIPE = os.pipe()
for p in self.PIPE:
util.set_non_blocking(p)
util.close_on_exec(p)
# Prevent fd inheritance
for s in self.sockets:
util.close_on_exec(s)
util.close_on_exec(self.tmp.fileno())
self.wait_fds = self.sockets + [self.PIPE[0]]
self.log.close_on_exec()
self.init_signals()
# start the reloader
if self.cfg.reload:
def changed(fname):
self.log.info("Worker reloading: %s modified", fname)
self.alive = False
os.write(self.PIPE[1], b"1")
self.cfg.worker_int(self)
time.sleep(0.1)
sys.exit(0)
reloader_cls = reloader_engines[self.cfg.reload_engine]
self.reloader = reloader_cls(
extra_files=self.cfg.reload_extra_files, callback=changed
)
self.load_wsgi()
if self.reloader:
self.reloader.start()
self.cfg.post_worker_init(self)
# Enter main run loop
self.booted = True
self.run()
|
def init_process(self):
"""\
If you override this method in a subclass, the last statement
in the function should be to call this method with
super().init_process() so that the ``run()`` loop is initiated.
"""
# set environment' variables
if self.cfg.env:
for k, v in self.cfg.env.items():
os.environ[k] = v
util.set_owner_process(self.cfg.uid, self.cfg.gid, initgroups=self.cfg.initgroups)
# Reseed the random number generator
util.seed()
# For waking ourselves up
self.PIPE = os.pipe()
for p in self.PIPE:
util.set_non_blocking(p)
util.close_on_exec(p)
# Prevent fd inheritance
for s in self.sockets:
util.close_on_exec(s)
util.close_on_exec(self.tmp.fileno())
self.wait_fds = self.sockets + [self.PIPE[0]]
self.log.close_on_exec()
self.init_signals()
self.load_wsgi()
# start the reloader
if self.cfg.reload:
def changed(fname):
self.log.info("Worker reloading: %s modified", fname)
self.alive = False
os.write(self.PIPE[1], b"1")
self.cfg.worker_int(self)
time.sleep(0.1)
sys.exit(0)
reloader_cls = reloader_engines[self.cfg.reload_engine]
self.reloader = reloader_cls(
extra_files=self.cfg.reload_extra_files, callback=changed
)
self.reloader.start()
self.cfg.post_worker_init(self)
# Enter main run loop
self.booted = True
self.run()
|
https://github.com/benoitc/gunicorn/issues/2244
|
[2020-01-21 21:01:06 +0100] [19910] [INFO] Starting gunicorn 20.0.4
[2020-01-21 21:01:06 +0100] [19910] [INFO] Listening at: http://127.0.0.1:8000 (19910)
[2020-01-21 21:01:06 +0100] [19910] [INFO] Using worker: sync
[2020-01-21 21:01:06 +0100] [19913] [INFO] Booting worker with pid: 19913
[2020-01-21 21:01:10 +0100] [19913] [INFO] Worker reloading: /home/liquid/tmp/gunicorn/app.py modified
[2020-01-21 21:01:10 +0100] [19913] [INFO] Worker exiting (pid: 19913)
[2020-01-21 21:01:10 +0100] [19931] [INFO] Booting worker with pid: 19931
[2020-01-21 21:01:10 +0100] [19931] [ERROR] invalid syntax (app.py, line 4)
Traceback (most recent call last):
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/workers/base.py", line 144, in load_wsgi
self.wsgi = self.app.wsgi()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/app/base.py", line 67, in wsgi
self.callable = self.load()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/app/wsgiapp.py", line 49, in load
return self.load_wsgiapp()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/app/wsgiapp.py", line 39, in load_wsgiapp
return util.import_app(self.app_uri)
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/util.py", line 358, in import_app
mod = importlib.import_module(module)
File "/usr/lib/python3.7/importlib/__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1006, in _gcd_import
File "<frozen importlib._bootstrap>", line 983, in _find_and_load
File "<frozen importlib._bootstrap>", line 967, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 677, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 724, in exec_module
File "<frozen importlib._bootstrap_external>", line 860, in get_code
File "<frozen importlib._bootstrap_external>", line 791, in source_to_code
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/home/liquid/tmp/gunicorn/app.py", line 4
def index():
^
SyntaxError: invalid syntax
[2020-01-21 21:01:10 +0100] [19931] [ERROR] Exception in worker process
Traceback (most recent call last):
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/workers/base.py", line 144, in load_wsgi
self.wsgi = self.app.wsgi()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/app/base.py", line 67, in wsgi
self.callable = self.load()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/app/wsgiapp.py", line 49, in load
return self.load_wsgiapp()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/app/wsgiapp.py", line 39, in load_wsgiapp
return util.import_app(self.app_uri)
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/util.py", line 358, in import_app
mod = importlib.import_module(module)
File "/usr/lib/python3.7/importlib/__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1006, in _gcd_import
File "<frozen importlib._bootstrap>", line 983, in _find_and_load
File "<frozen importlib._bootstrap>", line 967, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 677, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 724, in exec_module
File "<frozen importlib._bootstrap_external>", line 860, in get_code
File "<frozen importlib._bootstrap_external>", line 791, in source_to_code
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/home/liquid/tmp/gunicorn/app.py", line 4
def index():
^
SyntaxError: invalid syntax
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/arbiter.py", line 583, in spawn_worker
worker.init_process()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/workers/base.py", line 119, in init_process
self.load_wsgi()
File "/home/liquid/tmp/gunicorn/venv/lib/python3.7/site-packages/gunicorn/workers/base.py", line 157, in load_wsgi
self.reloader.add_extra_file(exc_val.filename)
AttributeError: 'NoneType' object has no attribute 'add_extra_file'
[2020-01-21 21:01:10 +0100] [19931] [INFO] Worker exiting (pid: 19931)
[2020-01-21 21:01:10 +0100] [19910] [INFO] Shutting down: Master
[2020-01-21 21:01:10 +0100] [19910] [INFO] Reason: Worker failed to boot.
|
SyntaxError
|
def fromfd(fd, keep_fd=True):
"""Create a socket from a file descriptor
socket domain (family), type and protocol are auto-detected. By default
the socket uses a dup()ed fd. The original fd can be closed.
The parameter `keep_fd` influences fd duplication. Under Python 2 the
fd is still duplicated but the input fd is closed. Under Python 3 and
with `keep_fd=True`, the new socket object uses the same fd.
:param fd: socket fd
:type fd: int
:param keep_fd: keep input fd
:type keep_fd: bool
:return: socket.socket instance
:raises OSError: for invalid socket fd
"""
sockaddr = _getsockname(fd)
family = sockaddr.sa_family
if hasattr(socket, "SO_TYPE"):
typ = _getsockopt(fd, socket.SOL_SOCKET, getattr(socket, "SO_TYPE"))
else:
typ = socket.SOCK_STREAM
if hasattr(socket, "SO_PROTOCOL"):
proto = _getsockopt(fd, socket.SOL_SOCKET, getattr(socket, "SO_PROTOCOL"))
else:
proto = 0
if keep_fd:
return socket.fromfd(fd, family, typ, proto)
else:
return socket.socket(family, typ, proto, fileno=fd)
|
def fromfd(fd, keep_fd=True):
"""Create a socket from a file descriptor
socket domain (family), type and protocol are auto-detected. By default
the socket uses a dup()ed fd. The original fd can be closed.
The parameter `keep_fd` influences fd duplication. Under Python 2 the
fd is still duplicated but the input fd is closed. Under Python 3 and
with `keep_fd=True`, the new socket object uses the same fd.
:param fd: socket fd
:type fd: int
:param keep_fd: keep input fd
:type keep_fd: bool
:return: socket.socket instance
:raises OSError: for invalid socket fd
"""
family = _raw_getsockname(fd)
if hasattr(socket, "SO_TYPE"):
typ = _raw_getsockopt(fd, socket.SOL_SOCKET, getattr(socket, "SO_TYPE"))
else:
typ = socket.SOCK_STREAM
if hasattr(socket, "SO_PROTOCOL"):
proto = _raw_getsockopt(fd, socket.SOL_SOCKET, getattr(socket, "SO_PROTOCOL"))
else:
proto = 0
if keep_fd:
return socket.fromfd(fd, family, typ, proto)
else:
return socket.socket(family, typ, proto, fileno=fd)
|
https://github.com/benoitc/gunicorn/issues/2201
|
[2019-11-25 11:33:44 +0800] [3967] [INFO] Starting gunicorn 20.0.2
[2019-11-25 11:33:44 +0800] [3967] [INFO] Listening at: http://127.0.0.1:8000 (3967)
[2019-11-25 11:33:44 +0800] [3967] [INFO] Using worker: sync
[2019-11-25 11:33:44 +0800] [3970] [INFO] Booting worker with pid: 3970
[2019-11-25 11:33:51 +0800] [3967] [INFO] Handling signal: usr2
[2019-11-25 11:33:51 +0800] [3975] [INFO] Starting gunicorn 20.0.2
Traceback (most recent call last):
File "/Users/xx/project/py37/bin/gunicorn", line 10, in <module>
sys.exit(run())
File "/Users/xx/project/py37/lib/python3.7/site-packages/gunicorn/app/wsgiapp.py", line 58, in run
WSGIApplication("%(prog)s [OPTIONS] [APP_MODULE]").run()
File "/Users/xx/project/py37/lib/python3.7/site-packages/gunicorn/app/base.py", line 219, in run
super().run()
File "/Users/xx/project/py37/lib/python3.7/site-packages/gunicorn/app/base.py", line 71, in run
Arbiter(self).run()
File "/Users/xx/project/py37/lib/python3.7/site-packages/gunicorn/arbiter.py", line 198, in run
self.start()
File "/Users/xx/project/py37/lib/python3.7/site-packages/gunicorn/arbiter.py", line 155, in start
self.LISTENERS = sock.create_sockets(self.cfg, self.log, fds)
File "/Users/xx/project/py37/lib/python3.7/site-packages/gunicorn/sock.py", line 171, in create_sockets
sock = fromfd(fd)
File "/Users/xx/project/py37/lib/python3.7/site-packages/gunicorn/socketfromfd.py", line 95, in fromfd
family = _raw_getsockopt(fd, socket.SOL_SOCKET, SO_DOMAIN)
File "/Users/xx/project/py37/lib/python3.7/site-packages/gunicorn/socketfromfd.py", line 74, in _raw_getsockopt
ctypes.byref(optval), ctypes.byref(optlen))
File "/Users/xx/project/py37/lib/python3.7/site-packages/gunicorn/socketfromfd.py", line 44, in _errcheck_errno
raise OSError(errno, os.strerror(errno))
OSError: [Errno 42] Protocol not available
|
OSError
|
def _eventlet_sendfile(fdout, fdin, offset, nbytes, _os_sendfile=os.sendfile):
while True:
try:
return _os_sendfile(fdout, fdin, offset, nbytes)
except OSError as e:
if e.args[0] == errno.EAGAIN:
trampoline(fdout, write=True)
else:
raise
|
def _eventlet_sendfile(fdout, fdin, offset, nbytes):
while True:
try:
return os.sendfile(fdout, fdin, offset, nbytes)
except OSError as e:
if e.args[0] == errno.EAGAIN:
trampoline(fdout, write=True)
else:
raise
|
https://github.com/benoitc/gunicorn/issues/1925
|
Traceback (most recent call last):
File "/usr/local/share/virtualenv/pywb/lib/python3.6/site-packages/gunicorn/workers/base_async.py", line 111, in handle_request
resp.write_file(respiter)
File "/usr/local/share/virtualenv/pywb/lib/python3.6/site-packages/gunicorn/http/wsgi.py", line 392, in write_file
if not self.sendfile(respiter):
File "/usr/local/share/virtualenv/pywb/lib/python3.6/site-packages/gunicorn/http/wsgi.py", line 382, in sendfile
sent += os.sendfile(sockno, fileno, offset + sent, count)
File "/usr/local/share/virtualenv/pywb/lib/python3.6/site-packages/gunicorn/workers/ggevent.py", line 37, in _gevent_sendfile
return os.sendfile(fdout, fdin, offset, nbytes)
File "/usr/local/share/virtualenv/pywb/lib/python3.6/site-packages/gunicorn/workers/ggevent.py", line 37, in _gevent_sendfile
return os.sendfile(fdout, fdin, offset, nbytes)
File "/usr/local/share/virtualenv/pywb/lib/python3.6/site-packages/gunicorn/workers/ggevent.py", line 37, in _gevent_sendfile
return os.sendfile(fdout, fdin, offset, nbytes)
[Previous line repeated 923 more times]
RecursionError: maximum recursion depth exceeded
|
RecursionError
|
def _gevent_sendfile(fdout, fdin, offset, nbytes, _os_sendfile=os.sendfile):
while True:
try:
return _os_sendfile(fdout, fdin, offset, nbytes)
except OSError as e:
if e.args[0] == errno.EAGAIN:
socket.wait_write(fdout)
else:
raise
|
def _gevent_sendfile(fdout, fdin, offset, nbytes):
while True:
try:
return os.sendfile(fdout, fdin, offset, nbytes)
except OSError as e:
if e.args[0] == errno.EAGAIN:
socket.wait_write(fdout)
else:
raise
|
https://github.com/benoitc/gunicorn/issues/1925
|
Traceback (most recent call last):
File "/usr/local/share/virtualenv/pywb/lib/python3.6/site-packages/gunicorn/workers/base_async.py", line 111, in handle_request
resp.write_file(respiter)
File "/usr/local/share/virtualenv/pywb/lib/python3.6/site-packages/gunicorn/http/wsgi.py", line 392, in write_file
if not self.sendfile(respiter):
File "/usr/local/share/virtualenv/pywb/lib/python3.6/site-packages/gunicorn/http/wsgi.py", line 382, in sendfile
sent += os.sendfile(sockno, fileno, offset + sent, count)
File "/usr/local/share/virtualenv/pywb/lib/python3.6/site-packages/gunicorn/workers/ggevent.py", line 37, in _gevent_sendfile
return os.sendfile(fdout, fdin, offset, nbytes)
File "/usr/local/share/virtualenv/pywb/lib/python3.6/site-packages/gunicorn/workers/ggevent.py", line 37, in _gevent_sendfile
return os.sendfile(fdout, fdin, offset, nbytes)
File "/usr/local/share/virtualenv/pywb/lib/python3.6/site-packages/gunicorn/workers/ggevent.py", line 37, in _gevent_sendfile
return os.sendfile(fdout, fdin, offset, nbytes)
[Previous line repeated 923 more times]
RecursionError: maximum recursion depth exceeded
|
RecursionError
|
def _get_user(self, environ):
user = None
http_auth = environ.get("HTTP_AUTHORIZATION")
if http_auth and http_auth.startswith("Basic"):
auth = http_auth.split(" ", 1)
if len(auth) == 2:
try:
# b64decode doesn't accept unicode in Python < 3.3
# so we need to convert it to a byte string
auth = base64.b64decode(auth[1].strip().encode("utf-8"))
if PY3: # b64decode returns a byte string in Python 3
auth = auth.decode("utf-8")
auth = auth.split(":", 1)
except (TypeError, binascii.Error, UnicodeDecodeError) as exc:
self.debug("Couldn't get username: %s", exc)
return user
if len(auth) == 2:
user = auth[0]
return user
|
def _get_user(self, environ):
user = None
http_auth = environ.get("HTTP_AUTHORIZATION")
if http_auth and http_auth.startswith("Basic"):
auth = http_auth.split(" ", 1)
if len(auth) == 2:
try:
# b64decode doesn't accept unicode in Python < 3.3
# so we need to convert it to a byte string
auth = base64.b64decode(auth[1].strip().encode("utf-8"))
if PY3: # b64decode returns a byte string in Python 3
auth = auth.decode("utf-8")
auth = auth.split(":", 1)
except TypeError as exc:
self.debug("Couldn't get username: %s", exc)
return user
except binascii.Error as exc:
self.debug("Couldn't get username: %s", exc)
return user
if len(auth) == 2:
user = auth[0]
return user
|
https://github.com/benoitc/gunicorn/issues/1683
|
[2018-01-15 18:23:06 +0000] [15] [ERROR] Error handling request
Traceback (most recent call last):
File "/app/.heroku/python/lib/python3.6/site-packages/gunicorn/glogging.py", line 269, in
'u': self._get_user(environ) or '-',
File "/app/.heroku/python/lib/python3.6/site-packages/gunicorn/glogging.py", line 446, in
auth = auth.decode('utf-8')
UnicodeDecodeError: 'utf-8' codec can't decode byte 0x8b in position 0: invalid start byte
|
UnicodeDecodeError
|
def validate(self):
"""Validate pidfile and make it stale if needed"""
if not self.fname:
return
try:
with open(self.fname, "r") as f:
try:
wpid = int(f.read())
except ValueError:
return
try:
os.kill(wpid, 0)
return wpid
except OSError as e:
if e.args[0] == errno.EPERM:
return wpid
if e.args[0] == errno.ESRCH:
return
raise
except IOError as e:
if e.args[0] == errno.ENOENT:
return
raise
|
def validate(self):
"""Validate pidfile and make it stale if needed"""
if not self.fname:
return
try:
with open(self.fname, "r") as f:
try:
wpid = int(f.read())
except ValueError:
return
try:
os.kill(wpid, 0)
return wpid
except OSError as e:
if e.args[0] == errno.ESRCH:
return
raise
except IOError as e:
if e.args[0] == errno.ENOENT:
return
raise
|
https://github.com/benoitc/gunicorn/issues/1091
|
Traceback (most recent call last):
File "/opt/python2.7/bin/gunicorn", line 11, in <module>
sys.exit(run())
File "/opt/python2.7/lib/python2.7/site-packages/gunicorn/app/wsgiapp.py", line 74, in run
WSGIApplication("%(prog)s [OPTIONS] [APP_MODULE]").run()
File "/opt/python2.7/lib/python2.7/site-packages/gunicorn/app/base.py", line 189, in run
super(Application, self).run()
File "/opt/python2.7/lib/python2.7/site-packages/gunicorn/app/base.py", line 72, in run
Arbiter(self).run()
File "/opt/python2.7/lib/python2.7/site-packages/gunicorn/arbiter.py", line 171, in run
self.start()
File "/opt/python2.7/lib/python2.7/site-packages/gunicorn/arbiter.py", line 125, in start
self.pidfile.create(self.pid)
File "/opt/python2.7/lib/python2.7/site-packages/gunicorn/pidfile.py", line 23, in create
oldpid = self.validate()
File "/opt/python2.7/lib/python2.7/site-packages/gunicorn/pidfile.py", line 75, in validate
os.kill(wpid, 0)
OSError: [Errno 1] Operation not permitted
|
OSError
|
def start(self):
"""\
Initialize the arbiter. Start listening and set pidfile if needed.
"""
self.log.info("Starting gunicorn %s", __version__)
if "GUNICORN_PID" in os.environ:
self.master_pid = int(os.environ.get("GUNICORN_PID"))
self.proc_name = self.proc_name + ".2"
self.master_name = "Master.2"
self.pid = os.getpid()
if self.cfg.pidfile is not None:
pidname = self.cfg.pidfile
if self.master_pid != 0:
pidname += ".2"
self.pidfile = Pidfile(pidname)
self.pidfile.create(self.pid)
self.cfg.on_starting(self)
self.init_signals()
if not self.LISTENERS:
fds = None
listen_fds = systemd.listen_fds()
if listen_fds:
self.systemd = True
fds = range(
systemd.SD_LISTEN_FDS_START, systemd.SD_LISTEN_FDS_START + listen_fds
)
elif self.master_pid:
fds = []
for fd in os.environ.pop("GUNICORN_FD").split(","):
fds.append(int(fd))
self.LISTENERS = sock.create_sockets(self.cfg, self.log, fds)
listeners_str = ",".join([str(l) for l in self.LISTENERS])
self.log.debug("Arbiter booted")
self.log.info("Listening at: %s (%s)", listeners_str, self.pid)
self.log.info("Using worker: %s", self.cfg.worker_class_str)
# check worker class requirements
if hasattr(self.worker_class, "check_config"):
self.worker_class.check_config(self.cfg, self.log)
self.cfg.when_ready(self)
|
def start(self):
"""\
Initialize the arbiter. Start listening and set pidfile if needed.
"""
self.log.info("Starting gunicorn %s", __version__)
if "GUNICORN_PID" in os.environ:
self.master_pid = int(os.environ.get("GUNICORN_PID"))
self.proc_name = self.proc_name + ".2"
self.master_name = "Master.2"
self.pid = os.getpid()
if self.cfg.pidfile is not None:
pidname = self.cfg.pidfile
if self.master_pid != 0:
pidname += ".2"
self.pidfile = Pidfile(pidname)
self.pidfile.create(self.pid)
self.cfg.on_starting(self)
self.init_signals()
if not self.LISTENERS:
fds = []
listen_fds = systemd.listen_fds()
if listen_fds:
self.systemd = True
fds = range(
systemd.SD_LISTEN_FDS_START, systemd.SD_LISTEN_FDS_START + listen_fds
)
elif self.master_pid:
for fd in os.environ.pop("GUNICORN_FD").split(","):
fds.append(int(fd))
self.LISTENERS = sock.create_sockets(self.cfg, self.log, fds)
listeners_str = ",".join([str(l) for l in self.LISTENERS])
self.log.debug("Arbiter booted")
self.log.info("Listening at: %s (%s)", listeners_str, self.pid)
self.log.info("Using worker: %s", self.cfg.worker_class_str)
# check worker class requirements
if hasattr(self.worker_class, "check_config"):
self.worker_class.check_config(self.cfg, self.log)
self.cfg.when_ready(self)
|
https://github.com/benoitc/gunicorn/issues/1423
|
Traceback (most recent call last):
File "/home/berker/projects/gunicorn/gunicorn/arbiter.py", line 574, in spawn_worker
worker.init_process()
File "/home/berker/projects/gunicorn/gunicorn/workers/base.py", line 140, in init_process
self.run()
File "/home/berker/projects/gunicorn/gunicorn/workers/sync.py", line 124, in run
self.run_for_one(timeout)
File "/home/berker/projects/gunicorn/gunicorn/workers/sync.py", line 59, in run_for_one
listener = self.sockets[0]
IndexError: list index out of range
|
IndexError
|
def handle(self, listener, client, addr):
req = None
try:
parser = http.RequestParser(self.cfg, client)
try:
listener_name = listener.getsockname()
if not self.cfg.keepalive:
req = six.next(parser)
self.handle_request(listener_name, req, client, addr)
else:
# keepalive loop
proxy_protocol_info = {}
while True:
req = None
with self.timeout_ctx():
req = six.next(parser)
if not req:
break
if req.proxy_protocol_info:
proxy_protocol_info = req.proxy_protocol_info
else:
req.proxy_protocol_info = proxy_protocol_info
self.handle_request(listener_name, req, client, addr)
except http.errors.NoMoreData as e:
self.log.debug("Ignored premature client disconnection. %s", e)
except StopIteration as e:
self.log.debug("Closing connection. %s", e)
except ssl.SSLError:
exc_info = sys.exc_info()
# pass to next try-except level
six.reraise(exc_info[0], exc_info[1], exc_info[2])
except EnvironmentError:
exc_info = sys.exc_info()
# pass to next try-except level
six.reraise(exc_info[0], exc_info[1], exc_info[2])
except Exception as e:
self.handle_error(req, client, addr, e)
except ssl.SSLError as e:
if e.args[0] == ssl.SSL_ERROR_EOF:
self.log.debug("ssl connection closed")
client.close()
else:
self.log.debug("Error processing SSL request.")
self.handle_error(req, client, addr, e)
except EnvironmentError as e:
if e.errno not in (errno.EPIPE, errno.ECONNRESET):
self.log.exception("Socket error processing request.")
else:
if e.errno == errno.ECONNRESET:
self.log.debug("Ignoring connection reset")
else:
self.log.debug("Ignoring EPIPE")
except Exception as e:
self.handle_error(req, client, addr, e)
finally:
util.close(client)
|
def handle(self, listener, client, addr):
req = None
try:
parser = http.RequestParser(self.cfg, client)
try:
listener_name = listener.getsockname()
if not self.cfg.keepalive:
req = six.next(parser)
self.handle_request(listener_name, req, client, addr)
else:
# keepalive loop
proxy_protocol_info = {}
while True:
req = None
with self.timeout_ctx():
req = six.next(parser)
if not req:
break
if req.proxy_protocol_info:
proxy_protocol_info = req.proxy_protocol_info
else:
req.proxy_protocol_info = proxy_protocol_info
self.handle_request(listener_name, req, client, addr)
except http.errors.NoMoreData as e:
self.log.debug("Ignored premature client disconnection. %s", e)
except StopIteration as e:
self.log.debug("Closing connection. %s", e)
except ssl.SSLError:
exc_info = sys.exc_info()
# pass to next try-except level
six.reraise(exc_info[0], exc_info[1], exc_info[2])
except socket.error:
exc_info = sys.exc_info()
# pass to next try-except level
six.reraise(exc_info[0], exc_info[1], exc_info[2])
except Exception as e:
self.handle_error(req, client, addr, e)
except ssl.SSLError as e:
if e.args[0] == ssl.SSL_ERROR_EOF:
self.log.debug("ssl connection closed")
client.close()
else:
self.log.debug("Error processing SSL request.")
self.handle_error(req, client, addr, e)
except socket.error as e:
if e.args[0] not in (errno.EPIPE, errno.ECONNRESET):
self.log.exception("Socket error processing request.")
else:
if e.args[0] == errno.ECONNRESET:
self.log.debug("Ignoring connection reset")
else:
self.log.debug("Ignoring EPIPE")
except Exception as e:
self.handle_error(req, client, addr, e)
finally:
util.close(client)
|
https://github.com/benoitc/gunicorn/issues/939
|
[2014-11-17 04:18:49 -0500] [13520] [ERROR] Error handling request
Traceback (most recent call last):
File "/usr/local/lib/python3.4/site-packages/gunicorn/workers/async.py", line 108, in handle_request
resp.write(item)
File "/usr/local/lib/python3.4/site-packages/gunicorn/http/wsgi.py", line 344, in write
util.write(self.sock, arg, self.chunked)
File "/usr/local/lib/python3.4/site-packages/gunicorn/util.py", line 301, in write
sock.sendall(data)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 375, in sendall
tail = self.send(data, flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 358, in send
total_sent += fd.send(data[total_sent:], flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 351, in send
return fd.send(data, flags)
BrokenPipeError: [Errno 32] Broken pipe
|
BrokenPipeError
|
def handle_request(self, listener_name, req, sock, addr):
request_start = datetime.now()
environ = {}
resp = None
try:
self.cfg.pre_request(self, req)
resp, environ = wsgi.create(req, sock, addr, listener_name, self.cfg)
environ["wsgi.multithread"] = True
self.nr += 1
if self.alive and self.nr >= self.max_requests:
self.log.info("Autorestarting worker after current request.")
resp.force_close()
self.alive = False
if not self.cfg.keepalive:
resp.force_close()
respiter = self.wsgi(environ, resp.start_response)
if respiter == ALREADY_HANDLED:
return False
try:
if isinstance(respiter, environ["wsgi.file_wrapper"]):
resp.write_file(respiter)
else:
for item in respiter:
resp.write(item)
resp.close()
request_time = datetime.now() - request_start
self.log.access(resp, req, environ, request_time)
finally:
if hasattr(respiter, "close"):
respiter.close()
if resp.should_close():
raise StopIteration()
except StopIteration:
raise
except EnvironmentError:
# If the original exception was a socket.error we delegate
# handling it to the caller (where handle() might ignore it)
six.reraise(*sys.exc_info())
except Exception:
if resp and resp.headers_sent:
# If the requests have already been sent, we should close the
# connection to indicate the error.
self.log.exception("Error handling request")
try:
sock.shutdown(socket.SHUT_RDWR)
sock.close()
except EnvironmentError:
pass
raise StopIteration()
raise
finally:
try:
self.cfg.post_request(self, req, environ, resp)
except Exception:
self.log.exception("Exception in post_request hook")
return True
|
def handle_request(self, listener_name, req, sock, addr):
request_start = datetime.now()
environ = {}
resp = None
try:
self.cfg.pre_request(self, req)
resp, environ = wsgi.create(req, sock, addr, listener_name, self.cfg)
environ["wsgi.multithread"] = True
self.nr += 1
if self.alive and self.nr >= self.max_requests:
self.log.info("Autorestarting worker after current request.")
resp.force_close()
self.alive = False
if not self.cfg.keepalive:
resp.force_close()
respiter = self.wsgi(environ, resp.start_response)
if respiter == ALREADY_HANDLED:
return False
try:
if isinstance(respiter, environ["wsgi.file_wrapper"]):
resp.write_file(respiter)
else:
for item in respiter:
resp.write(item)
resp.close()
request_time = datetime.now() - request_start
self.log.access(resp, req, environ, request_time)
finally:
if hasattr(respiter, "close"):
respiter.close()
if resp.should_close():
raise StopIteration()
except StopIteration:
raise
except socket.error:
# If the original exception was a socket.error we delegate
# handling it to the caller (where handle() might ignore it)
six.reraise(*sys.exc_info())
except Exception:
if resp and resp.headers_sent:
# If the requests have already been sent, we should close the
# connection to indicate the error.
self.log.exception("Error handling request")
try:
sock.shutdown(socket.SHUT_RDWR)
sock.close()
except socket.error:
pass
raise StopIteration()
raise
finally:
try:
self.cfg.post_request(self, req, environ, resp)
except Exception:
self.log.exception("Exception in post_request hook")
return True
|
https://github.com/benoitc/gunicorn/issues/939
|
[2014-11-17 04:18:49 -0500] [13520] [ERROR] Error handling request
Traceback (most recent call last):
File "/usr/local/lib/python3.4/site-packages/gunicorn/workers/async.py", line 108, in handle_request
resp.write(item)
File "/usr/local/lib/python3.4/site-packages/gunicorn/http/wsgi.py", line 344, in write
util.write(self.sock, arg, self.chunked)
File "/usr/local/lib/python3.4/site-packages/gunicorn/util.py", line 301, in write
sock.sendall(data)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 375, in sendall
tail = self.send(data, flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 358, in send
total_sent += fd.send(data[total_sent:], flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 351, in send
return fd.send(data, flags)
BrokenPipeError: [Errno 32] Broken pipe
|
BrokenPipeError
|
def accept(self, listener):
try:
client, addr = listener.accept()
# initialize the connection object
conn = TConn(self.cfg, listener, client, addr)
self.nr_conns += 1
# enqueue the job
self.enqueue_req(conn)
except EnvironmentError as e:
if e.errno not in (errno.EAGAIN, errno.ECONNABORTED, errno.EWOULDBLOCK):
raise
|
def accept(self, listener):
try:
client, addr = listener.accept()
# initialize the connection object
conn = TConn(self.cfg, listener, client, addr)
self.nr_conns += 1
# enqueue the job
self.enqueue_req(conn)
except socket.error as e:
if e.args[0] not in (errno.EAGAIN, errno.ECONNABORTED, errno.EWOULDBLOCK):
raise
|
https://github.com/benoitc/gunicorn/issues/939
|
[2014-11-17 04:18:49 -0500] [13520] [ERROR] Error handling request
Traceback (most recent call last):
File "/usr/local/lib/python3.4/site-packages/gunicorn/workers/async.py", line 108, in handle_request
resp.write(item)
File "/usr/local/lib/python3.4/site-packages/gunicorn/http/wsgi.py", line 344, in write
util.write(self.sock, arg, self.chunked)
File "/usr/local/lib/python3.4/site-packages/gunicorn/util.py", line 301, in write
sock.sendall(data)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 375, in sendall
tail = self.send(data, flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 358, in send
total_sent += fd.send(data[total_sent:], flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 351, in send
return fd.send(data, flags)
BrokenPipeError: [Errno 32] Broken pipe
|
BrokenPipeError
|
def murder_keepalived(self):
now = time.time()
while True:
with self._lock:
try:
# remove the connection from the queue
conn = self._keep.popleft()
except IndexError:
break
delta = conn.timeout - now
if delta > 0:
# add the connection back to the queue
with self._lock:
self._keep.appendleft(conn)
break
else:
self.nr_conns -= 1
# remove the socket from the poller
with self._lock:
try:
self.poller.unregister(conn.sock)
except EnvironmentError as e:
if e.errno != errno.EBADF:
raise
# close the socket
conn.close()
|
def murder_keepalived(self):
now = time.time()
while True:
with self._lock:
try:
# remove the connection from the queue
conn = self._keep.popleft()
except IndexError:
break
delta = conn.timeout - now
if delta > 0:
# add the connection back to the queue
with self._lock:
self._keep.appendleft(conn)
break
else:
self.nr_conns -= 1
# remove the socket from the poller
with self._lock:
try:
self.poller.unregister(conn.sock)
except socket.error as e:
if e.args[0] != errno.EBADF:
raise
# close the socket
conn.close()
|
https://github.com/benoitc/gunicorn/issues/939
|
[2014-11-17 04:18:49 -0500] [13520] [ERROR] Error handling request
Traceback (most recent call last):
File "/usr/local/lib/python3.4/site-packages/gunicorn/workers/async.py", line 108, in handle_request
resp.write(item)
File "/usr/local/lib/python3.4/site-packages/gunicorn/http/wsgi.py", line 344, in write
util.write(self.sock, arg, self.chunked)
File "/usr/local/lib/python3.4/site-packages/gunicorn/util.py", line 301, in write
sock.sendall(data)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 375, in sendall
tail = self.send(data, flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 358, in send
total_sent += fd.send(data[total_sent:], flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 351, in send
return fd.send(data, flags)
BrokenPipeError: [Errno 32] Broken pipe
|
BrokenPipeError
|
def handle(self, conn):
keepalive = False
req = None
try:
req = six.next(conn.parser)
if not req:
return (False, conn)
# handle the request
keepalive = self.handle_request(req, conn)
if keepalive:
return (keepalive, conn)
except http.errors.NoMoreData as e:
self.log.debug("Ignored premature client disconnection. %s", e)
except StopIteration as e:
self.log.debug("Closing connection. %s", e)
except ssl.SSLError as e:
if e.args[0] == ssl.SSL_ERROR_EOF:
self.log.debug("ssl connection closed")
conn.sock.close()
else:
self.log.debug("Error processing SSL request.")
self.handle_error(req, conn.sock, conn.addr, e)
except EnvironmentError as e:
if e.errno not in (errno.EPIPE, errno.ECONNRESET):
self.log.exception("Socket error processing request.")
else:
if e.errno == errno.ECONNRESET:
self.log.debug("Ignoring connection reset")
else:
self.log.debug("Ignoring connection epipe")
except Exception as e:
self.handle_error(req, conn.sock, conn.addr, e)
return (False, conn)
|
def handle(self, conn):
keepalive = False
req = None
try:
req = six.next(conn.parser)
if not req:
return (False, conn)
# handle the request
keepalive = self.handle_request(req, conn)
if keepalive:
return (keepalive, conn)
except http.errors.NoMoreData as e:
self.log.debug("Ignored premature client disconnection. %s", e)
except StopIteration as e:
self.log.debug("Closing connection. %s", e)
except ssl.SSLError as e:
if e.args[0] == ssl.SSL_ERROR_EOF:
self.log.debug("ssl connection closed")
conn.sock.close()
else:
self.log.debug("Error processing SSL request.")
self.handle_error(req, conn.sock, conn.addr, e)
except socket.error as e:
if e.args[0] not in (errno.EPIPE, errno.ECONNRESET):
self.log.exception("Socket error processing request.")
else:
if e.args[0] == errno.ECONNRESET:
self.log.debug("Ignoring connection reset")
else:
self.log.debug("Ignoring connection epipe")
except Exception as e:
self.handle_error(req, conn.sock, conn.addr, e)
return (False, conn)
|
https://github.com/benoitc/gunicorn/issues/939
|
[2014-11-17 04:18:49 -0500] [13520] [ERROR] Error handling request
Traceback (most recent call last):
File "/usr/local/lib/python3.4/site-packages/gunicorn/workers/async.py", line 108, in handle_request
resp.write(item)
File "/usr/local/lib/python3.4/site-packages/gunicorn/http/wsgi.py", line 344, in write
util.write(self.sock, arg, self.chunked)
File "/usr/local/lib/python3.4/site-packages/gunicorn/util.py", line 301, in write
sock.sendall(data)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 375, in sendall
tail = self.send(data, flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 358, in send
total_sent += fd.send(data[total_sent:], flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 351, in send
return fd.send(data, flags)
BrokenPipeError: [Errno 32] Broken pipe
|
BrokenPipeError
|
def handle_request(self, req, conn):
environ = {}
resp = None
try:
self.cfg.pre_request(self, req)
request_start = datetime.now()
resp, environ = wsgi.create(
req, conn.sock, conn.addr, conn.listener.getsockname(), self.cfg
)
environ["wsgi.multithread"] = True
self.nr += 1
if self.alive and self.nr >= self.max_requests:
self.log.info("Autorestarting worker after current request.")
resp.force_close()
self.alive = False
if not self.cfg.keepalive:
resp.force_close()
elif len(self._keep) >= self.max_keepalived:
resp.force_close()
respiter = self.wsgi(environ, resp.start_response)
try:
if isinstance(respiter, environ["wsgi.file_wrapper"]):
resp.write_file(respiter)
else:
for item in respiter:
resp.write(item)
resp.close()
request_time = datetime.now() - request_start
self.log.access(resp, req, environ, request_time)
finally:
if hasattr(respiter, "close"):
respiter.close()
if resp.should_close():
self.log.debug("Closing connection.")
return False
except EnvironmentError:
exc_info = sys.exc_info()
# pass to next try-except level
six.reraise(exc_info[0], exc_info[1], exc_info[2])
except Exception:
if resp and resp.headers_sent:
# If the requests have already been sent, we should close the
# connection to indicate the error.
self.log.exception("Error handling request")
try:
conn.sock.shutdown(socket.SHUT_RDWR)
conn.sock.close()
except EnvironmentError:
pass
raise StopIteration()
raise
finally:
try:
self.cfg.post_request(self, req, environ, resp)
except Exception:
self.log.exception("Exception in post_request hook")
return True
|
def handle_request(self, req, conn):
environ = {}
resp = None
try:
self.cfg.pre_request(self, req)
request_start = datetime.now()
resp, environ = wsgi.create(
req, conn.sock, conn.addr, conn.listener.getsockname(), self.cfg
)
environ["wsgi.multithread"] = True
self.nr += 1
if self.alive and self.nr >= self.max_requests:
self.log.info("Autorestarting worker after current request.")
resp.force_close()
self.alive = False
if not self.cfg.keepalive:
resp.force_close()
elif len(self._keep) >= self.max_keepalived:
resp.force_close()
respiter = self.wsgi(environ, resp.start_response)
try:
if isinstance(respiter, environ["wsgi.file_wrapper"]):
resp.write_file(respiter)
else:
for item in respiter:
resp.write(item)
resp.close()
request_time = datetime.now() - request_start
self.log.access(resp, req, environ, request_time)
finally:
if hasattr(respiter, "close"):
respiter.close()
if resp.should_close():
self.log.debug("Closing connection.")
return False
except socket.error:
exc_info = sys.exc_info()
# pass to next try-except level
six.reraise(exc_info[0], exc_info[1], exc_info[2])
except Exception:
if resp and resp.headers_sent:
# If the requests have already been sent, we should close the
# connection to indicate the error.
self.log.exception("Error handling request")
try:
conn.sock.shutdown(socket.SHUT_RDWR)
conn.sock.close()
except socket.error:
pass
raise StopIteration()
raise
finally:
try:
self.cfg.post_request(self, req, environ, resp)
except Exception:
self.log.exception("Exception in post_request hook")
return True
|
https://github.com/benoitc/gunicorn/issues/939
|
[2014-11-17 04:18:49 -0500] [13520] [ERROR] Error handling request
Traceback (most recent call last):
File "/usr/local/lib/python3.4/site-packages/gunicorn/workers/async.py", line 108, in handle_request
resp.write(item)
File "/usr/local/lib/python3.4/site-packages/gunicorn/http/wsgi.py", line 344, in write
util.write(self.sock, arg, self.chunked)
File "/usr/local/lib/python3.4/site-packages/gunicorn/util.py", line 301, in write
sock.sendall(data)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 375, in sendall
tail = self.send(data, flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 358, in send
total_sent += fd.send(data[total_sent:], flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 351, in send
return fd.send(data, flags)
BrokenPipeError: [Errno 32] Broken pipe
|
BrokenPipeError
|
def run_for_one(self, timeout):
listener = self.sockets[0]
while self.alive:
self.notify()
# Accept a connection. If we get an error telling us
# that no connection is waiting we fall down to the
# select which is where we'll wait for a bit for new
# workers to come give us some love.
try:
self.accept(listener)
# Keep processing clients until no one is waiting. This
# prevents the need to select() for every client that we
# process.
continue
except EnvironmentError as e:
if e.errno not in (errno.EAGAIN, errno.ECONNABORTED, errno.EWOULDBLOCK):
raise
if not self.is_parent_alive():
return
try:
self.wait(timeout)
except StopWaiting:
return
|
def run_for_one(self, timeout):
listener = self.sockets[0]
while self.alive:
self.notify()
# Accept a connection. If we get an error telling us
# that no connection is waiting we fall down to the
# select which is where we'll wait for a bit for new
# workers to come give us some love.
try:
self.accept(listener)
# Keep processing clients until no one is waiting. This
# prevents the need to select() for every client that we
# process.
continue
except socket.error as e:
if e.args[0] not in (errno.EAGAIN, errno.ECONNABORTED, errno.EWOULDBLOCK):
raise
if not self.is_parent_alive():
return
try:
self.wait(timeout)
except StopWaiting:
return
|
https://github.com/benoitc/gunicorn/issues/939
|
[2014-11-17 04:18:49 -0500] [13520] [ERROR] Error handling request
Traceback (most recent call last):
File "/usr/local/lib/python3.4/site-packages/gunicorn/workers/async.py", line 108, in handle_request
resp.write(item)
File "/usr/local/lib/python3.4/site-packages/gunicorn/http/wsgi.py", line 344, in write
util.write(self.sock, arg, self.chunked)
File "/usr/local/lib/python3.4/site-packages/gunicorn/util.py", line 301, in write
sock.sendall(data)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 375, in sendall
tail = self.send(data, flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 358, in send
total_sent += fd.send(data[total_sent:], flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 351, in send
return fd.send(data, flags)
BrokenPipeError: [Errno 32] Broken pipe
|
BrokenPipeError
|
def run_for_multiple(self, timeout):
while self.alive:
self.notify()
try:
ready = self.wait(timeout)
except StopWaiting:
return
if ready is not None:
for listener in ready:
try:
self.accept(listener)
except EnvironmentError as e:
if e.errno not in (
errno.EAGAIN,
errno.ECONNABORTED,
errno.EWOULDBLOCK,
):
raise
if not self.is_parent_alive():
return
|
def run_for_multiple(self, timeout):
while self.alive:
self.notify()
try:
ready = self.wait(timeout)
except StopWaiting:
return
if ready is not None:
for listener in ready:
try:
self.accept(listener)
except socket.error as e:
if e.args[0] not in (
errno.EAGAIN,
errno.ECONNABORTED,
errno.EWOULDBLOCK,
):
raise
if not self.is_parent_alive():
return
|
https://github.com/benoitc/gunicorn/issues/939
|
[2014-11-17 04:18:49 -0500] [13520] [ERROR] Error handling request
Traceback (most recent call last):
File "/usr/local/lib/python3.4/site-packages/gunicorn/workers/async.py", line 108, in handle_request
resp.write(item)
File "/usr/local/lib/python3.4/site-packages/gunicorn/http/wsgi.py", line 344, in write
util.write(self.sock, arg, self.chunked)
File "/usr/local/lib/python3.4/site-packages/gunicorn/util.py", line 301, in write
sock.sendall(data)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 375, in sendall
tail = self.send(data, flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 358, in send
total_sent += fd.send(data[total_sent:], flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 351, in send
return fd.send(data, flags)
BrokenPipeError: [Errno 32] Broken pipe
|
BrokenPipeError
|
def handle(self, listener, client, addr):
req = None
try:
if self.cfg.is_ssl:
client = ssl.wrap_socket(client, server_side=True, **self.cfg.ssl_options)
parser = http.RequestParser(self.cfg, client)
req = six.next(parser)
self.handle_request(listener, req, client, addr)
except http.errors.NoMoreData as e:
self.log.debug("Ignored premature client disconnection. %s", e)
except StopIteration as e:
self.log.debug("Closing connection. %s", e)
except ssl.SSLError as e:
if e.args[0] == ssl.SSL_ERROR_EOF:
self.log.debug("ssl connection closed")
client.close()
else:
self.log.debug("Error processing SSL request.")
self.handle_error(req, client, addr, e)
except EnvironmentError as e:
if e.errno not in (errno.EPIPE, errno.ECONNRESET):
self.log.exception("Socket error processing request.")
else:
if e.errno == errno.ECONNRESET:
self.log.debug("Ignoring connection reset")
else:
self.log.debug("Ignoring EPIPE")
except Exception as e:
self.handle_error(req, client, addr, e)
finally:
util.close(client)
|
def handle(self, listener, client, addr):
req = None
try:
if self.cfg.is_ssl:
client = ssl.wrap_socket(client, server_side=True, **self.cfg.ssl_options)
parser = http.RequestParser(self.cfg, client)
req = six.next(parser)
self.handle_request(listener, req, client, addr)
except http.errors.NoMoreData as e:
self.log.debug("Ignored premature client disconnection. %s", e)
except StopIteration as e:
self.log.debug("Closing connection. %s", e)
except ssl.SSLError as e:
if e.args[0] == ssl.SSL_ERROR_EOF:
self.log.debug("ssl connection closed")
client.close()
else:
self.log.debug("Error processing SSL request.")
self.handle_error(req, client, addr, e)
except socket.error as e:
if e.args[0] not in (errno.EPIPE, errno.ECONNRESET):
self.log.exception("Socket error processing request.")
else:
if e.args[0] == errno.ECONNRESET:
self.log.debug("Ignoring connection reset")
else:
self.log.debug("Ignoring EPIPE")
except Exception as e:
self.handle_error(req, client, addr, e)
finally:
util.close(client)
|
https://github.com/benoitc/gunicorn/issues/939
|
[2014-11-17 04:18:49 -0500] [13520] [ERROR] Error handling request
Traceback (most recent call last):
File "/usr/local/lib/python3.4/site-packages/gunicorn/workers/async.py", line 108, in handle_request
resp.write(item)
File "/usr/local/lib/python3.4/site-packages/gunicorn/http/wsgi.py", line 344, in write
util.write(self.sock, arg, self.chunked)
File "/usr/local/lib/python3.4/site-packages/gunicorn/util.py", line 301, in write
sock.sendall(data)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 375, in sendall
tail = self.send(data, flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 358, in send
total_sent += fd.send(data[total_sent:], flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 351, in send
return fd.send(data, flags)
BrokenPipeError: [Errno 32] Broken pipe
|
BrokenPipeError
|
def handle_request(self, listener, req, client, addr):
environ = {}
resp = None
try:
self.cfg.pre_request(self, req)
request_start = datetime.now()
resp, environ = wsgi.create(req, client, addr, listener.getsockname(), self.cfg)
# Force the connection closed until someone shows
# a buffering proxy that supports Keep-Alive to
# the backend.
resp.force_close()
self.nr += 1
if self.nr >= self.max_requests:
self.log.info("Autorestarting worker after current request.")
self.alive = False
respiter = self.wsgi(environ, resp.start_response)
try:
if isinstance(respiter, environ["wsgi.file_wrapper"]):
resp.write_file(respiter)
else:
for item in respiter:
resp.write(item)
resp.close()
request_time = datetime.now() - request_start
self.log.access(resp, req, environ, request_time)
finally:
if hasattr(respiter, "close"):
respiter.close()
except EnvironmentError:
exc_info = sys.exc_info()
# pass to next try-except level
six.reraise(exc_info[0], exc_info[1], exc_info[2])
except Exception:
if resp and resp.headers_sent:
# If the requests have already been sent, we should close the
# connection to indicate the error.
self.log.exception("Error handling request")
try:
client.shutdown(socket.SHUT_RDWR)
client.close()
except EnvironmentError:
pass
raise StopIteration()
raise
finally:
try:
self.cfg.post_request(self, req, environ, resp)
except Exception:
self.log.exception("Exception in post_request hook")
|
def handle_request(self, listener, req, client, addr):
environ = {}
resp = None
try:
self.cfg.pre_request(self, req)
request_start = datetime.now()
resp, environ = wsgi.create(req, client, addr, listener.getsockname(), self.cfg)
# Force the connection closed until someone shows
# a buffering proxy that supports Keep-Alive to
# the backend.
resp.force_close()
self.nr += 1
if self.nr >= self.max_requests:
self.log.info("Autorestarting worker after current request.")
self.alive = False
respiter = self.wsgi(environ, resp.start_response)
try:
if isinstance(respiter, environ["wsgi.file_wrapper"]):
resp.write_file(respiter)
else:
for item in respiter:
resp.write(item)
resp.close()
request_time = datetime.now() - request_start
self.log.access(resp, req, environ, request_time)
finally:
if hasattr(respiter, "close"):
respiter.close()
except socket.error:
exc_info = sys.exc_info()
# pass to next try-except level
six.reraise(exc_info[0], exc_info[1], exc_info[2])
except Exception:
if resp and resp.headers_sent:
# If the requests have already been sent, we should close the
# connection to indicate the error.
self.log.exception("Error handling request")
try:
client.shutdown(socket.SHUT_RDWR)
client.close()
except socket.error:
pass
raise StopIteration()
raise
finally:
try:
self.cfg.post_request(self, req, environ, resp)
except Exception:
self.log.exception("Exception in post_request hook")
|
https://github.com/benoitc/gunicorn/issues/939
|
[2014-11-17 04:18:49 -0500] [13520] [ERROR] Error handling request
Traceback (most recent call last):
File "/usr/local/lib/python3.4/site-packages/gunicorn/workers/async.py", line 108, in handle_request
resp.write(item)
File "/usr/local/lib/python3.4/site-packages/gunicorn/http/wsgi.py", line 344, in write
util.write(self.sock, arg, self.chunked)
File "/usr/local/lib/python3.4/site-packages/gunicorn/util.py", line 301, in write
sock.sendall(data)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 375, in sendall
tail = self.send(data, flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 358, in send
total_sent += fd.send(data[total_sent:], flags)
File "/usr/local/lib/python3.4/site-packages/eventlet/greenio.py", line 351, in send
return fd.send(data, flags)
BrokenPipeError: [Errno 32] Broken pipe
|
BrokenPipeError
|
def _set_handler(self, log, output, fmt):
# remove previous gunicorn log handler
h = self._get_gunicorn_handler(log)
if h:
log.handlers.remove(h)
if output is not None:
if output == "-":
h = logging.StreamHandler()
else:
util.check_is_writeable(output)
h = logging.FileHandler(output)
# make sure the user can reopen the file
if not util.is_writable(h.baseFilename, self.cfg.user, self.cfg.group):
os.chown(h.baseFilename, self.cfg.user, self.cfg.group)
h.setFormatter(fmt)
h._gunicorn = True
log.addHandler(h)
|
def _set_handler(self, log, output, fmt):
# remove previous gunicorn log handler
h = self._get_gunicorn_handler(log)
if h:
log.handlers.remove(h)
if output is not None:
if output == "-":
h = logging.StreamHandler()
else:
util.check_is_writeable(output)
h = logging.FileHandler(output)
# make sure the user can reopen the file
os.chown(h.baseFilename, self.cfg.user, self.cfg.group)
h.setFormatter(fmt)
h._gunicorn = True
log.addHandler(h)
|
https://github.com/benoitc/gunicorn/issues/1157
|
Traceback (most recent call last):
File "/env/bin/gunicorn", line 11, in <module>
sys.exit(run())
File "/env/lib/python2.7/site-packages/gunicorn/app/wsgiapp.py", line 74, in run
WSGIApplication("%(prog)s [OPTIONS] [APP_MODULE]").run()
File "/env/lib/python2.7/site-packages/gunicorn/app/base.py", line 192, in run
super(Application, self).run()
File "/env/lib/python2.7/site-packages/gunicorn/app/base.py", line 72, in run
Arbiter(self).run()
File "/env/lib/python2.7/site-packages/gunicorn/arbiter.py", line 61, in __init__
self.setup(app)
File "/env/lib/python2.7/site-packages/gunicorn/arbiter.py", line 94, in setup
self.log = self.cfg.logger_class(app.cfg)
File "/env/lib/python2.7/site-packages/gunicorn/glogging.py", line 178, in __init__
self.setup(cfg)
File "/env/lib/python2.7/site-packages/gunicorn/glogging.py", line 192, in setup
fmt=logging.Formatter(self.access_fmt))
File "/env/lib/python2.7/site-packages/gunicorn/glogging.py", line 341, in _set_handler
os.chown(h.baseFilename, self.cfg.user, self.cfg.group)
OSError: [Errno 1] Operation not permitted: '/dev/null'
|
OSError
|
def _get_user(self, environ):
user = None
http_auth = environ.get("HTTP_AUTHORIZATION")
if http_auth and http_auth.startswith("Basic"):
auth = http_auth.split(" ", 1)
if len(auth) == 2:
try:
# b64decode doesn't accept unicode in Python < 3.3
# so we need to convert it to a byte string
auth = base64.b64decode(auth[1].strip().encode("utf-8"))
if PY3: # b64decode returns a byte string in Python 3
auth = auth.decode("utf-8")
auth = auth.split(":", 1)
except TypeError as exc:
self.debug("Couldn't get username: %s", exc)
return user
if len(auth) == 2:
user = auth[0]
return user
|
def _get_user(self, environ):
user = None
http_auth = environ.get("HTTP_AUTHORIZATION")
if http_auth:
auth = http_auth.split(" ", 1)
if len(auth) == 2:
try:
# b64decode doesn't accept unicode in Python < 3.3
# so we need to convert it to a byte string
auth = base64.b64decode(auth[1].strip().encode("utf-8"))
if PY3: # b64decode returns a byte string in Python 3
auth = auth.decode("utf-8")
auth = auth.split(":", 1)
except TypeError as exc:
self.debug("Couldn't get username: %s", exc)
return user
if len(auth) == 2:
user = auth[0]
return user
|
https://github.com/benoitc/gunicorn/issues/1148
|
Traceback (most recent call last):
File "/usr/src/deps/gunicorn/gunicorn/workers/async.py", line 116, in handle_request
self.log.access(resp, req, environ, request_time)
File "/usr/src/deps/gunicorn/gunicorn/glogging.py", line 287, in access
request_time))
File "/usr/src/deps/gunicorn/gunicorn/glogging.py", line 243, in atoms
'u': self._get_user(environ) or '-',
File "/usr/src/deps/gunicorn/gunicorn/glogging.py", line 392, in _get_user
auth = auth.decode('utf-8')
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xc2 in position 0: invalid continuation byte
|
UnicodeDecodeError
|
def parse_request_line(self, line):
bits = line.split(None, 2)
if len(bits) != 3:
raise InvalidRequestLine(line)
# Method
if not METH_RE.match(bits[0]):
raise InvalidRequestMethod(bits[0])
self.method = bits[0].upper()
# URI
# When the path starts with //, urlsplit considers it as a
# relative uri while the RDF says it shouldnt
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
# considers it as an absolute url.
# fix issue #297
if bits[1].startswith("//"):
self.uri = bits[1][1:]
else:
self.uri = bits[1]
try:
parts = urlsplit(self.uri)
except ValueError:
raise InvalidRequestLine(line)
self.path = parts.path or ""
self.query = parts.query or ""
self.fragment = parts.fragment or ""
# Version
match = VERSION_RE.match(bits[2])
if match is None:
raise InvalidHTTPVersion(bits[2])
self.version = (int(match.group(1)), int(match.group(2)))
|
def parse_request_line(self, line):
bits = line.split(None, 2)
if len(bits) != 3:
raise InvalidRequestLine(line)
# Method
if not METH_RE.match(bits[0]):
raise InvalidRequestMethod(bits[0])
self.method = bits[0].upper()
# URI
# When the path starts with //, urlsplit considers it as a
# relative uri while the RDF says it shouldnt
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
# considers it as an absolute url.
# fix issue #297
if bits[1].startswith("//"):
self.uri = bits[1][1:]
else:
self.uri = bits[1]
parts = urlsplit(self.uri)
self.path = parts.path or ""
self.query = parts.query or ""
self.fragment = parts.fragment or ""
# Version
match = VERSION_RE.match(bits[2])
if match is None:
raise InvalidHTTPVersion(bits[2])
self.version = (int(match.group(1)), int(match.group(2)))
|
https://github.com/benoitc/gunicorn/issues/1023
|
(tempenv-027621611529f) ~/.v/tempenv-701523535780 $ gunicorn w:f
[2015-05-09 11:53:50 -0400] [75550] [INFO] Starting gunicorn 19.3.0
[2015-05-09 11:53:50 -0400] [75550] [INFO] Listening at: http://127.0.0.1:8000 (75550)
[2015-05-09 11:53:50 -0400] [75550] [INFO] Using worker: sync
[2015-05-09 11:53:50 -0400] [75567] [INFO] Booting worker with pid: 75567
[2015-05-09 11:54:44 -0400] [75567] [ERROR] Error handling request
Traceback (most recent call last):
File "/Users/alex_gaynor/.virtualenvs/tempenv-027621611529f/lib/python2.7/site-packages/gunicorn/workers/sync.py", line 129, in handle
req = six.next(parser)
File "/Users/alex_gaynor/.virtualenvs/tempenv-027621611529f/lib/python2.7/site-packages/gunicorn/http/parser.py", line 41, in __next__
self.mesg = self.mesg_class(self.cfg, self.unreader, self.req_count)
File "/Users/alex_gaynor/.virtualenvs/tempenv-027621611529f/lib/python2.7/site-packages/gunicorn/http/message.py", line 153, in __init__
super(Request, self).__init__(cfg, unreader)
File "/Users/alex_gaynor/.virtualenvs/tempenv-027621611529f/lib/python2.7/site-packages/gunicorn/http/message.py", line 53, in __init__
unused = self.parse(self.unreader)
File "/Users/alex_gaynor/.virtualenvs/tempenv-027621611529f/lib/python2.7/site-packages/gunicorn/http/message.py", line 177, in parse
self.parse_request_line(bytes_to_str(line))
File "/Users/alex_gaynor/.virtualenvs/tempenv-027621611529f/lib/python2.7/site-packages/gunicorn/http/message.py", line 326, in parse_request_line
parts = urlsplit(self.uri)
File "/Users/alex_gaynor/.pyenv/versions/2.7.9/lib/python2.7/urlparse.py", line 214, in urlsplit
raise ValueError("Invalid IPv6 URL")
ValueError: Invalid IPv6 URL
|
ValueError
|
def handle(self, listener, client, addr):
req = None
try:
parser = http.RequestParser(self.cfg, client)
try:
listener_name = listener.getsockname()
if not self.cfg.keepalive:
req = six.next(parser)
self.handle_request(listener_name, req, client, addr)
else:
# keepalive loop
proxy_protocol_info = {}
while True:
req = None
with self.timeout_ctx():
req = six.next(parser)
if not req:
break
if req.proxy_protocol_info:
proxy_protocol_info = req.proxy_protocol_info
else:
req.proxy_protocol_info = proxy_protocol_info
self.handle_request(listener_name, req, client, addr)
except http.errors.NoMoreData as e:
self.log.debug("Ignored premature client disconnection. %s", e)
except StopIteration as e:
self.log.debug("Closing connection. %s", e)
except ssl.SSLError:
exc_info = sys.exc_info()
# pass to next try-except level
six.reraise(exc_info[0], exc_info[1], exc_info[2])
except socket.error:
exc_info = sys.exc_info()
# pass to next try-except level
six.reraise(exc_info[0], exc_info[1], exc_info[2])
except Exception as e:
self.handle_error(req, client, addr, e)
except ssl.SSLError as e:
if e.args[0] == ssl.SSL_ERROR_EOF:
self.log.debug("ssl connection closed")
client.close()
else:
self.log.debug("Error processing SSL request.")
self.handle_error(req, client, addr, e)
except socket.error as e:
if e.args[0] not in (errno.EPIPE, errno.ECONNRESET):
self.log.exception("Socket error processing request.")
else:
if e.args[0] == errno.ECONNRESET:
self.log.debug("Ignoring connection reset")
else:
self.log.debug("Ignoring EPIPE")
except Exception as e:
self.handle_error(req, client, addr, e)
finally:
util.close(client)
|
def handle(self, listener, client, addr):
req = None
try:
parser = http.RequestParser(self.cfg, client)
try:
listener_name = listener.getsockname()
if not self.cfg.keepalive:
req = six.next(parser)
self.handle_request(listener_name, req, client, addr)
else:
# keepalive loop
proxy_protocol_info = req.proxy_protocol_info
while True:
req = None
with self.timeout_ctx():
req = six.next(parser)
if not req:
break
req.proxy_protocol_info = proxy_protocol_info
self.handle_request(listener_name, req, client, addr)
except http.errors.NoMoreData as e:
self.log.debug("Ignored premature client disconnection. %s", e)
except StopIteration as e:
self.log.debug("Closing connection. %s", e)
except ssl.SSLError:
exc_info = sys.exc_info()
# pass to next try-except level
six.reraise(exc_info[0], exc_info[1], exc_info[2])
except socket.error:
exc_info = sys.exc_info()
# pass to next try-except level
six.reraise(exc_info[0], exc_info[1], exc_info[2])
except Exception as e:
self.handle_error(req, client, addr, e)
except ssl.SSLError as e:
if e.args[0] == ssl.SSL_ERROR_EOF:
self.log.debug("ssl connection closed")
client.close()
else:
self.log.debug("Error processing SSL request.")
self.handle_error(req, client, addr, e)
except socket.error as e:
if e.args[0] not in (errno.EPIPE, errno.ECONNRESET):
self.log.exception("Socket error processing request.")
else:
if e.args[0] == errno.ECONNRESET:
self.log.debug("Ignoring connection reset")
else:
self.log.debug("Ignoring EPIPE")
except Exception as e:
self.handle_error(req, client, addr, e)
finally:
util.close(client)
|
https://github.com/benoitc/gunicorn/issues/923
|
[2014-10-27 20:36:55 +0000] [22663] [ERROR] Error handling request
Traceback (most recent call last):
File "/mnt/runscope/.virtualenvs/embedcurl/src/gunicorn/gunicorn/workers/async.py", line 41, in handle
proxy_protocol_info = req.proxy_protocol_info
AttributeError: 'NoneType' object has no attribute 'proxy_protocol_info'
|
AttributeError
|
def murder_keepalived(self):
now = time.time()
while True:
try:
# remove the connection from the queue
conn = self._keep.popleft()
except IndexError:
break
delta = conn.timeout - now
if delta > 0:
# add the connection back to the queue
self._keep.appendleft(conn)
break
else:
# remove the socket from the poller
self.poller.unregister(conn.sock)
# close the socket
util.close(conn.sock)
|
def murder_keepalived(self):
now = time.time()
while True:
try:
conn = self._keep.popleft()
except IndexError:
break
delta = conn.timeout - now
if delta > 0:
self._keep.appendleft(conn)
break
else:
# remove the connection from the queue
conn = self._keep.popleft()
# remove the socket from the poller
self.poller.unregister(conn.sock)
# close the socket
util.close(conn.sock)
|
https://github.com/benoitc/gunicorn/issues/816
|
(wsgiapps)sontek@aladdin:~/src/wsgiapps/pyramid_hello$ gunicorn --paste development.ini --log-file=-
2014-07-12 00:10:42 [4752] [INFO] Starting gunicorn 19.0.0
2014-07-12 00:10:42 [4752] [INFO] Listening at: http://0.0.0.0:8000 (4752)
2014-07-12 00:10:42 [4752] [INFO] Using worker: threads
2014-07-12 00:10:42 [4803] [INFO] Booting worker with pid: 4803
2014-07-12 00:10:42 [4806] [INFO] Booting worker with pid: 4806
2014-07-12 00:10:42 [4831] [INFO] Booting worker with pid: 4831
2014-07-12 00:10:42 [4844] [INFO] Booting worker with pid: 4844
2014-07-12 00:10:42 [4885] [INFO] Booting worker with pid: 4885
2014-07-12 00:10:46 [4885] [ERROR] Exception in worker process:
Traceback (most recent call last):
File "/home/sontek/venvs/wsgiapps/local/lib/python2.7/site-packages/gunicorn/arbiter.py", line 502, in spawn_worker
worker.init_process()
File "/home/sontek/venvs/wsgiapps/local/lib/python2.7/site-packages/gunicorn/workers/gthread.py", line 109, in init_process
super(ThreadWorker, self).init_process()
File "/home/sontek/venvs/wsgiapps/local/lib/python2.7/site-packages/gunicorn/workers/base.py", line 120, in init_process
self.run()
File "/home/sontek/venvs/wsgiapps/local/lib/python2.7/site-packages/gunicorn/workers/gthread.py", line 178, in run
self.murder_keepalived()
File "/home/sontek/venvs/wsgiapps/local/lib/python2.7/site-packages/gunicorn/workers/gthread.py", line 147, in murder_keepalived
conn = self._keep.popleft()
IndexError: pop from an empty deque
Traceback (most recent call last):
File "/home/sontek/venvs/wsgiapps/local/lib/python2.7/site-packages/gunicorn/arbiter.py", line 502, in spawn_worker
worker.init_process()
File "/home/sontek/venvs/wsgiapps/local/lib/python2.7/site-packages/gunicorn/workers/gthread.py", line 109, in init_process
super(ThreadWorker, self).init_process()
File "/home/sontek/venvs/wsgiapps/local/lib/python2.7/site-packages/gunicorn/workers/base.py", line 120, in init_process
self.run()
File "/home/sontek/venvs/wsgiapps/local/lib/python2.7/site-packages/gunicorn/workers/gthread.py", line 178, in run
self.murder_keepalived()
File "/home/sontek/venvs/wsgiapps/local/lib/python2.7/site-packages/gunicorn/workers/gthread.py", line 147, in murder_keepalived
conn = self._keep.popleft()
IndexError: pop from an empty deque
|
IndexError
|
def app(environ, start_response):
"""Framework Engine"""
os.environ.setdefault("REQUEST_METHOD", environ["REQUEST_METHOD"])
os.environ.setdefault("URI_PATH", environ["PATH_INFO"])
router = Route(environ)
if router.is_post():
# Set POST parameters given into the query string to centralize parsing and retrieval.
# This was made to directly support the Request.input() method.
# There is no known reason to differentiate between GET and POST when retrieving
# form paramters. This line below simply puts both POST form params in the
# same query_string as GET params.
environ["QUERY_STRING"] = router.set_post_params()
# Get all routes from the routes/web.py file
import routes.web
routes = routes.web.ROUTES
# Instantiate the Request object.
# This is the `request` object passed into controller methods
request = Request(environ)
# Check all http routes
for route in routes:
# Compiles the given route to regex
regex = router.compile_route_to_regex(route)
# Make a better match trailing slashes `/` at the end of routes
# Sometimes a user will end with a trailing slash. Because the
# user may create routes like `/url/route` and `/url/route/`
# and how the regex is compiled down, we may need to adjust for
# urls that end or dont end with a trailing slash. This is sort
# of a quirk and could possibly be fixed.
if route.route_url.endswith("/"):
matchurl = re.compile(regex.replace(r"\/\/$", r"\/$"))
else:
matchurl = re.compile(regex.replace(r"\/$", r"$"))
# This will create a dictionary of paramters given. This is a sort of a short
# but complex way to retrieve the url paramters. This is the code used to
# convert /url/@firstname/@lastname to {'firstmane': 'joseph', 'lastname': 'mancuso'}
try:
parameter_dict = {}
for index, value in enumerate(matchurl.match(router.url).groups()):
parameter_dict[router.generated_url_list()[index]] = value
request.set_params(parameter_dict)
except AttributeError:
pass
# If the route url matches the regex and the method type and the route can continue
# (will be used with middleware later)
if (
matchurl.match(router.url)
and route.method_type == environ["REQUEST_METHOD"]
and route.continueroute is True
):
# Execute HTTP Before Middleware
# for http_middleware in middleware.HTTP_MIDDLEWARE:
# if hasattr(http_middleware, 'before'):
# http_middleware(request).before()
for http_middleware in middleware.HTTP_MIDDLEWARE:
located_middleware = locate(http_middleware)
if hasattr(located_middleware, "before"):
located_middleware(request).before()
# Show a helper in the terminal of which route has been visited
print(route.method_type + " Route: " + router.url)
# Loads the request in so the middleware specified is able to use the
# request object. This is before middleware and is ran before the request
route.load_request(request).run_middleware("before")
# Get the data from the route. This data is typically the output
# of the controller method
if not request.redirect_url:
data = router.get(route.route, route.output(request))
# Loads the request in so the middleware specified is able to use the
# request object. This is after middleware and is ran after the request
route.load_request(request).run_middleware("after")
# Execute HTTP After Middleware
for http_middleware in middleware.HTTP_MIDDLEWARE:
located_middleware = locate(http_middleware)
if hasattr(located_middleware, "after"):
located_middleware(request).after()
break
else:
data = "Route not found. Error 404"
## Check all API Routes
if data == "Route not found. Error 404":
# Look at the API routes files
import routes.api
routes = routes.api.ROUTES
for route in routes:
# If the base url matches
if route.url in router.url:
data = route.fetch(request).output
if data:
break
else:
data = "Route not found. Error 404"
else:
data = "Route not found. Error 404"
# Set redirection if a route has been called to redirect to.
# redirect_route is a property set on the request object when
# methods like request.redirectTo('nameRoute') are called
if request.redirect_route:
for route in routes:
if route.named_route == request.redirect_route:
print(route.method_type + " Named Route: " + router.url)
data = router.get(route.route, route.output(request))
request.redirect_url = route.route_url
# Convert the data that is retrieved above to bytes so the wsgi server can handle it.
data = bytes(data, "utf-8")
if not request.redirect_url:
# Convert the data that is retrieved above to bytes so the wsgi server can handle it.
data = bytes(data, "utf-8")
# Normal HTTP response.
start_response(
"200 OK",
[
("Content-Type", "text/html; charset=utf-8"),
("Content-Length", str(len(data))),
]
+ request.get_cookies(),
)
else:
data = bytes("redirecting ..", "utf-8")
# Redirection. In order to redirect the response types need to be 302 instead of 200
start_response(
"302 OK",
[("Location", request.compile_route_to_url())] + request.get_cookies(),
)
# This will output the data to the browser.
return iter([data])
|
def app(environ, start_response):
"""Framework Engine"""
os.environ.setdefault("REQUEST_METHOD", environ["REQUEST_METHOD"])
os.environ.setdefault("URI_PATH", environ["PATH_INFO"])
router = Route(environ)
if router.is_post():
# Set POST parameters given into the query string to centralize parsing and retrieval.
# This was made to directly support the Request.input() method.
# There is no known reason to differentiate between GET and POST when retrieving
# form paramters. This line below simply puts both POST form params in the
# same query_string as GET params.
environ["QUERY_STRING"] = router.set_post_params()
# Get all routes from the routes/web.py file
import routes.web
routes = routes.web.ROUTES
# Instantiate the Request object.
# This is the `request` object passed into controller methods
request = Request(environ)
# Check all http routes
for route in routes:
# Compiles the given route to regex
regex = router.compile_route_to_regex(route)
# Make a better match trailing slashes `/` at the end of routes
# Sometimes a user will end with a trailing slash. Because the
# user may create routes like `/url/route` and `/url/route/`
# and how the regex is compiled down, we may need to adjust for
# urls that end or dont end with a trailing slash. This is sort
# of a quirk and could possibly be fixed.
if route.route_url.endswith("/"):
matchurl = re.compile(regex.replace(r"\/\/$", r"\/$"))
else:
matchurl = re.compile(regex.replace(r"\/$", r"$"))
# This will create a dictionary of paramters given. This is a sort of a short
# but complex way to retrieve the url paramters. This is the code used to
# convert /url/@firstname/@lastname to {'firstmane': 'joseph', 'lastname': 'mancuso'}
try:
parameter_dict = {}
for index, value in enumerate(matchurl.match(router.url).groups()):
parameter_dict[router.generated_url_list()[index]] = value
request.set_params(parameter_dict)
except AttributeError:
pass
# If the route url matches the regex and the method type and the route can continue
# (will be used with middleware later)
if (
matchurl.match(router.url)
and route.method_type == environ["REQUEST_METHOD"]
and route.continueroute is True
):
# Execute HTTP Before Middleware
# for http_middleware in middleware.HTTP_MIDDLEWARE:
# if hasattr(http_middleware, 'before'):
# http_middleware(request).before()
for http_middleware in middleware.HTTP_MIDDLEWARE:
located_middleware = locate(http_middleware)
if hasattr(located_middleware, "before"):
located_middleware(request).before()
# Show a helper in the terminal of which route has been visited
print(route.method_type + " Route: " + router.url)
# Loads the request in so the middleware specified is able to use the
# request object. This is before middleware and is ran before the request
route.load_request(request).run_middleware("before")
# Get the data from the route. This data is typically the output
# of the controller method
data = router.get(route.route, route.output(request))
# Loads the request in so the middleware specified is able to use the
# request object. This is after middleware and is ran after the request
route.load_request(request).run_middleware("after")
# Execute HTTP After Middleware
for http_middleware in middleware.HTTP_MIDDLEWARE:
located_middleware = locate(http_middleware)
if hasattr(located_middleware, "after"):
located_middleware(request).after()
break
else:
data = "Route not found. Error 404"
## Check all API Routes
if data == "Route not found. Error 404":
# Look at the API routes files
import routes.api
routes = routes.api.ROUTES
for route in routes:
# If the base url matches
if route.url in router.url:
data = route.fetch(request).output
if data:
break
else:
data = "Route not found. Error 404"
else:
data = "Route not found. Error 404"
# Set redirection if a route has been called to redirect to.
# redirect_route is a property set on the request object when
# methods like request.redirectTo('nameRoute') are called
if request.redirect_route:
for route in routes:
if route.named_route == request.redirect_route:
print(route.method_type + " Named Route: " + router.url)
data = router.get(route.route, route.output(request))
request.redirect_url = route.route_url
# Convert the data that is retrieved above to bytes so the wsgi server can handle it.
data = bytes(data, "utf-8")
if not request.redirect_url:
# Normal HTTP response.
start_response(
"200 OK",
[
("Content-Type", "text/html; charset=utf-8"),
("Content-Length", str(len(data))),
]
+ request.get_cookies(),
)
else:
# Redirection. In order to redirect the response types need to be 302 instead of 200
start_response(
"302 OK",
[("Location", request.compile_route_to_url())] + request.get_cookies(),
)
# This will output the data to the browser.
return iter([data])
|
https://github.com/MasoniteFramework/masonite/issues/63
|
ERROR:waitress:Exception when serving /logout
Traceback (most recent call last):
File "/Users/mancuso/Programming/cleanblog/venv/lib/python3.6/site-packages/waitress/channel.py", line 338, in service
task.service()
File "/Users/mancuso/Programming/cleanblog/venv/lib/python3.6/site-packages/waitress/task.py", line 169, in service
self.execute()
File "/Users/mancuso/Programming/cleanblog/venv/lib/python3.6/site-packages/waitress/task.py", line 399, in execute
app_iter = self.channel.server.application(env, start_response)
File "/Users/mancuso/Programming/cleanblog/venv/lib/python3.6/site-packages/whitenoise/base.py", line 66, in __call__
return self.application(environ, start_response)
File "/Users/mancuso/Programming/cleanblog/bootstrap/start.py", line 138, in app
data = bytes(data, 'utf-8')
TypeError: encoding without a string argument
|
TypeError
|
def open(cls, grammar_filename, rel_to=None, **options):
"""Create an instance of Lark with the grammar given by its filename
If rel_to is provided, the function will find the grammar filename in relation to it.
Example:
>>> Lark.open("grammar_file.lark", rel_to=__file__, parser="lalr")
Lark(...)
"""
if rel_to:
basepath = os.path.dirname(rel_to)
grammar_filename = os.path.join(basepath, grammar_filename)
with open(grammar_filename, encoding="utf8") as f:
return cls(f, **options)
|
def open(cls, grammar_filename, rel_to=None, **options):
"""Create an instance of Lark with the grammar given by its filename
If rel_to is provided, the function will find the grammar filename in relation to it.
Example:
>>> Lark.open("grammar_file.lark", rel_to=__file__, parser="lalr")
Lark(...)
"""
if rel_to:
basepath = os.path.dirname(rel_to)
grammar_filename = os.path.join(basepath, grammar_filename)
with open(grammar_filename) as f:
return cls(f, **options)
|
https://github.com/lark-parser/lark/issues/179
|
Traceback (most recent call last):
File "d:\lark-parser\lark\lexer.py", line 221, in __init__
lexer = lexer_by_tokens[key]
KeyError: frozenset({Token(TERMINAL, '__ANON_0'), 'grammar'})
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "d:\lark-parser\lark\lexer.py", line 179, in __init__
re.compile(t.pattern.to_regexp())
File "F:\Python\lib\re.py", line 233, in compile
return _compile(pattern, flags)
File "F:\Python\lib\re.py", line 301, in _compile
p = sre_compile.compile(pattern, flags)
File "F:\Python\lib\sre_compile.py", line 562, in compile
p = sre_parse.parse(p, flags)
File "F:\Python\lib\sre_parse.py", line 855, in parse
p = _parse_sub(source, pattern, flags & SRE_FLAG_VERBOSE, 0)
File "F:\Python\lib\sre_parse.py", line 416, in _parse_sub
not nested and not items))
File "F:\Python\lib\sre_parse.py", line 553, in _parse
raise source.error(msg, len(this) + 1 + len(that))
sre_constants.error: bad character range ˜-Ã at position 5
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "D:\no_ambiguous.py", line 11, in <module>
_parser = lark.Lark.open( "parser.lark", start='grammar', parser='lalr' )
File "d:\lark-parser\lark\lark.py", line 206, in open
return cls(f, **options)
File "d:\lark-parser\lark\lark.py", line 165, in __init__
self.parser = self._build_parser()
File "d:\lark-parser\lark\lark.py", line 188, in _build_parser
return self.parser_class(self.lexer_conf, parser_conf, options=self.options)
File "d:\lark-parser\lark\parser_frontends.py", line 46, in __init__
self.init_contextual_lexer(lexer_conf, parser_conf)
File "d:\lark-parser\lark\parser_frontends.py", line 23, in init_contextual_lexer
user_callbacks=lexer_conf.callbacks)
File "d:\lark-parser\lark\lexer.py", line 225, in __init__
lexer = Lexer(state_tokens, ignore=ignore, user_callbacks=user_callbacks)
File "d:\lark-parser\lark\lexer.py", line 181, in __init__
raise LexError("Cannot compile token %s: %s" % (t.name, t.pattern))
NameError: name 'LexError' is not defined
|
KeyError
|
def parse(self, stream, start_symbol=None):
# Define parser functions
start_symbol = start_symbol or self.start_symbol
delayed_matches = defaultdict(list)
text_line = 1
text_column = 0
def predict(nonterm, column):
assert not isinstance(nonterm, Terminal), nonterm
return [Item(rule, 0, column, None) for rule in self.predictions[nonterm]]
def complete(item):
name = item.rule.origin
return [i.advance(item.tree) for i in item.start.to_predict if i.expect == name]
def predict_and_complete(column):
while True:
to_predict = {
x.expect for x in column.to_predict.get_news() if x.ptr
} # if not part of an already predicted batch
to_reduce = column.to_reduce.get_news()
if not (to_predict or to_reduce):
break
for nonterm in to_predict:
column.add(predict(nonterm, column))
for item in to_reduce:
new_items = list(complete(item))
for new_item in new_items:
if new_item.similar(item):
raise ParseError(
"Infinite recursion detected! (rule %s)" % new_item.rule
)
column.add(new_items)
def scan(i, token, column):
to_scan = column.to_scan.get_news()
for x in self.ignore:
m = x.match(stream, i)
if m:
# TODO add partial matches for ignore too?
delayed_matches[m.end()] += to_scan
for item in to_scan:
m = item.expect.match(stream, i)
if m:
t = Token(item.expect.name, m.group(0), i, text_line, text_column)
delayed_matches[m.end()].append(item.advance(t))
s = m.group(0)
for j in range(1, len(s)):
m = item.expect.match(s[:-j])
if m:
delayed_matches[m.end()].append(item.advance(m.group(0)))
next_set = Column(i + 1)
next_set.add(delayed_matches[i + 1])
del delayed_matches[i + 1] # No longer needed, so unburden memory
return next_set
# Main loop starts
column0 = Column(0)
column0.add(predict(start_symbol, column0))
column = column0
for i, token in enumerate(stream):
predict_and_complete(column)
column = scan(i, token, column)
if token == "\n":
text_line += 1
text_column = 0
else:
text_column += 1
predict_and_complete(column)
# Parse ended. Now build a parse tree
solutions = [
n.tree
for n in column.to_reduce
if n.rule.origin == start_symbol and n.start is column0
]
if not solutions:
raise ParseError("Incomplete parse: Could not find a solution to input")
elif len(solutions) == 1:
tree = solutions[0]
else:
tree = Tree("_ambig", solutions)
if self.resolve_ambiguity:
tree = self.resolve_ambiguity(tree)
return ApplyCallbacks(self.postprocess).transform(tree)
|
def parse(self, stream, start_symbol=None):
# Define parser functions
start_symbol = start_symbol or self.start_symbol
delayed_matches = defaultdict(list)
text_line = 1
text_column = 0
def predict(nonterm, column):
assert not isinstance(nonterm, Terminal), nonterm
return [Item(rule, 0, column, None) for rule in self.predictions[nonterm]]
def complete(item):
name = item.rule.origin
return [i.advance(item.tree) for i in item.start.to_predict if i.expect == name]
def predict_and_complete(column):
while True:
to_predict = {
x.expect for x in column.to_predict.get_news() if x.ptr
} # if not part of an already predicted batch
to_reduce = column.to_reduce.get_news()
if not (to_predict or to_reduce):
break
for nonterm in to_predict:
column.add(predict(nonterm, column))
for item in to_reduce:
new_items = list(complete(item))
for new_item in new_items:
if new_item.similar(item):
raise ParseError(
"Infinite recursion detected! (rule %s)" % new_item.rule
)
column.add(new_items)
def scan(i, token, column):
for x in self.ignore:
m = x.match(stream, i)
if m:
return column
to_scan = column.to_scan.get_news()
for item in to_scan:
m = item.expect.match(stream, i)
if m:
t = Token(item.expect.name, m.group(0), i, text_line, text_column)
delayed_matches[m.end()].append(item.advance(t))
s = m.group(0)
for j in range(1, len(s)):
m = item.expect.match(s[:-j])
if m:
delayed_matches[m.end()].append(item.advance(m.group(0)))
next_set = Column(i + 1)
next_set.add(delayed_matches[i + 1])
del delayed_matches[i + 1] # No longer needed, so unburden memory
return next_set
# Main loop starts
column0 = Column(0)
column0.add(predict(start_symbol, column0))
column = column0
for i, token in enumerate(stream):
predict_and_complete(column)
column = scan(i, token, column)
if token == "\n":
text_line += 1
text_column = 0
else:
text_column += 1
predict_and_complete(column)
# Parse ended. Now build a parse tree
solutions = [
n.tree
for n in column.to_reduce
if n.rule.origin == start_symbol and n.start is column0
]
if not solutions:
raise ParseError("Incomplete parse: Could not find a solution to input")
elif len(solutions) == 1:
tree = solutions[0]
else:
tree = Tree("_ambig", solutions)
if self.resolve_ambiguity:
tree = self.resolve_ambiguity(tree)
return ApplyCallbacks(self.postprocess).transform(tree)
|
https://github.com/lark-parser/lark/issues/24
|
Traceback (most recent call last):
File "lark_earley_bug.py", line 18, in <module>
tree = parser.parse("int 1 ! This is a comment\n")
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/lark/lark.py", line 193, in parse
return self.parser.parse(text)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/lark/parser_frontends.py", line 144, in parse
return self.parser.parse(text)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/lark/parsers/xearley.py", line 128, in parse
raise ParseError('Incomplete parse: Could not find a solution to input')
lark.common.ParseError: Incomplete parse: Could not find a solution to input
|
lark.common.ParseError
|
def scan(i, token, column):
to_scan = column.to_scan.get_news()
for x in self.ignore:
m = x.match(stream, i)
if m:
# TODO add partial matches for ignore too?
delayed_matches[m.end()] += to_scan
for item in to_scan:
m = item.expect.match(stream, i)
if m:
t = Token(item.expect.name, m.group(0), i, text_line, text_column)
delayed_matches[m.end()].append(item.advance(t))
s = m.group(0)
for j in range(1, len(s)):
m = item.expect.match(s[:-j])
if m:
delayed_matches[m.end()].append(item.advance(m.group(0)))
next_set = Column(i + 1)
next_set.add(delayed_matches[i + 1])
del delayed_matches[i + 1] # No longer needed, so unburden memory
return next_set
|
def scan(i, token, column):
for x in self.ignore:
m = x.match(stream, i)
if m:
return column
to_scan = column.to_scan.get_news()
for item in to_scan:
m = item.expect.match(stream, i)
if m:
t = Token(item.expect.name, m.group(0), i, text_line, text_column)
delayed_matches[m.end()].append(item.advance(t))
s = m.group(0)
for j in range(1, len(s)):
m = item.expect.match(s[:-j])
if m:
delayed_matches[m.end()].append(item.advance(m.group(0)))
next_set = Column(i + 1)
next_set.add(delayed_matches[i + 1])
del delayed_matches[i + 1] # No longer needed, so unburden memory
return next_set
|
https://github.com/lark-parser/lark/issues/24
|
Traceback (most recent call last):
File "lark_earley_bug.py", line 18, in <module>
tree = parser.parse("int 1 ! This is a comment\n")
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/lark/lark.py", line 193, in parse
return self.parser.parse(text)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/lark/parser_frontends.py", line 144, in parse
return self.parser.parse(text)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/lark/parsers/xearley.py", line 128, in parse
raise ParseError('Incomplete parse: Could not find a solution to input')
lark.common.ParseError: Incomplete parse: Could not find a solution to input
|
lark.common.ParseError
|
def _identify_local_rank(self):
if "SLURM_JOBID" in os.environ:
os.environ["LOCAL_RANK"] = os.environ["SLURM_LOCALID"]
if "LOCAL_RANK" in os.environ:
self._local_rank = int(os.environ["LOCAL_RANK"])
elif self._ext_local_rank is not None:
self._local_rank = self._ext_local_rank
else:
warnings.warn(
"Local rank information for native distributed setting will be initialized using "
"heuristic approach based on hostname which can be different of real setup. Please, "
"either set `os.environ['LOCAL_RANK']` "
"or use `idist.set_local_rank(local_rank)` with correct local rank index."
)
# use socket gethostname heuristic to determine number of nodes => local rank
self._local_rank = self._compute_local_rank_via_hostname()
|
def _identify_local_rank(self):
if "SLURM_JOBID" in os.environ:
os.environ["LOCAL_RANK"] = os.environ["SLURM_LOCALID"]
if "LOCAL_RANK" in os.environ:
self._local_rank = int(os.environ["LOCAL_RANK"])
elif self._ext_local_rank is not None:
self._local_rank = self._ext_local_rank
else:
warnings.warn(
"Local rank information for native distributed setting will be initialized using heuristic approach "
"based on hostname which can be different of real setup. Please, either set `os.environ['LOCAL_RANK']` "
"ro use `idist.set_local_rank(local_rank)` with correct local rank index."
)
# use socket gethostname heuristic to determine number of nodes => local rank
self._local_rank = self._compute_local_rank_via_hostname()
|
https://github.com/pytorch/ignite/issues/1110
|
import ignite
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\__init__.py", line 2, in <module>
import ignite.distributed
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\__init__.py", line 1, in <module>
from ignite.distributed.utils import *
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\utils.py", line 9, in <module>
from ignite.distributed.comp_models import _SerialModel, has_xla_support, registered_computation_models
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\comp_models\__init__.py", line 2, in <module>
from ignite.distributed.comp_models.native import _NativeDistModel
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\comp_models\native.py", line 13, in <module>
class _NativeDistModel(ComputationModel):
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\comp_models\native.py", line 33, in _NativeDistModel
for name in [dist.Backend.NCCL, dist.Backend.GLOO, dist.Backend.MPI]
AttributeError: module 'torch.distributed' has no attribute 'Backend'
|
AttributeError
|
def create_from_context() -> Optional["_XlaDistModel"]:
return _XlaDistModel()
|
def create_from_context() -> Optional["_XlaDistModel"]:
if not has_xla_support:
return None
return _XlaDistModel()
|
https://github.com/pytorch/ignite/issues/1110
|
import ignite
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\__init__.py", line 2, in <module>
import ignite.distributed
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\__init__.py", line 1, in <module>
from ignite.distributed.utils import *
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\utils.py", line 9, in <module>
from ignite.distributed.comp_models import _SerialModel, has_xla_support, registered_computation_models
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\comp_models\__init__.py", line 2, in <module>
from ignite.distributed.comp_models.native import _NativeDistModel
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\comp_models\native.py", line 13, in <module>
class _NativeDistModel(ComputationModel):
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\comp_models\native.py", line 33, in _NativeDistModel
for name in [dist.Backend.NCCL, dist.Backend.GLOO, dist.Backend.MPI]
AttributeError: module 'torch.distributed' has no attribute 'Backend'
|
AttributeError
|
def create_from_backend(backend: str = "xla-tpu", **kwargs) -> "_XlaDistModel":
return _XlaDistModel(backend=backend, **kwargs)
|
def create_from_backend(backend: str = "xla-tpu", **kwargs) -> "_XlaDistModel":
if not has_xla_support:
raise RuntimeError("Torch xla package is not installed.")
return _XlaDistModel(backend=backend, **kwargs)
|
https://github.com/pytorch/ignite/issues/1110
|
import ignite
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\__init__.py", line 2, in <module>
import ignite.distributed
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\__init__.py", line 1, in <module>
from ignite.distributed.utils import *
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\utils.py", line 9, in <module>
from ignite.distributed.comp_models import _SerialModel, has_xla_support, registered_computation_models
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\comp_models\__init__.py", line 2, in <module>
from ignite.distributed.comp_models.native import _NativeDistModel
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\comp_models\native.py", line 13, in <module>
class _NativeDistModel(ComputationModel):
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\comp_models\native.py", line 33, in _NativeDistModel
for name in [dist.Backend.NCCL, dist.Backend.GLOO, dist.Backend.MPI]
AttributeError: module 'torch.distributed' has no attribute 'Backend'
|
AttributeError
|
def spawn(
fn: Callable,
args: Tuple,
kwargs_dict: Optional[Mapping] = None,
num_procs_per_node: int = 1,
num_nodes: int = 1,
node_rank: int = 0,
backend: str = "xla-tpu",
**kwargs,
):
import os
if "COLAB_TPU_ADDR" in os.environ:
kwargs["start_method"] = "fork"
xmp.spawn(
_XlaDistModel._dist_worker_task_fn,
args=(backend, fn, args, kwargs_dict),
nprocs=num_procs_per_node,
**kwargs,
)
|
def spawn(
fn: Callable,
args: Tuple,
kwargs_dict: Optional[Mapping] = None,
num_procs_per_node: int = 1,
num_nodes: int = 1,
node_rank: int = 0,
backend: str = "xla-tpu",
**kwargs,
):
if not has_xla_support:
raise RuntimeError("Torch xla package is not installed.")
import os
if "COLAB_TPU_ADDR" in os.environ:
kwargs["start_method"] = "fork"
xmp.spawn(
_XlaDistModel._dist_worker_task_fn,
args=(backend, fn, args, kwargs_dict),
nprocs=num_procs_per_node,
**kwargs,
)
|
https://github.com/pytorch/ignite/issues/1110
|
import ignite
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\__init__.py", line 2, in <module>
import ignite.distributed
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\__init__.py", line 1, in <module>
from ignite.distributed.utils import *
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\utils.py", line 9, in <module>
from ignite.distributed.comp_models import _SerialModel, has_xla_support, registered_computation_models
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\comp_models\__init__.py", line 2, in <module>
from ignite.distributed.comp_models.native import _NativeDistModel
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\comp_models\native.py", line 13, in <module>
class _NativeDistModel(ComputationModel):
File "C:\Users\Foo\anaconda3\envs\ignite\lib\site-packages\ignite\distributed\comp_models\native.py", line 33, in _NativeDistModel
for name in [dist.Backend.NCCL, dist.Backend.GLOO, dist.Backend.MPI]
AttributeError: module 'torch.distributed' has no attribute 'Backend'
|
AttributeError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.