after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def housekeeping(self, expired_threshold, info_threshold):
# delete 'closed' or 'expired' alerts older than "expired_threshold" hours
# and 'informational' alerts older than "info_threshold" hours
delete = """
DELETE FROM alerts
WHERE (status IN ('closed', 'expired')
AND last_receive_time < (NOW() at time zone 'utc' - INTERVAL '%(expired_threshold)s hours'))
OR (severity='informational'
AND last_receive_time < (NOW() at time zone 'utc' - INTERVAL '%(info_threshold)s hours'))
"""
self._delete(
delete,
{"expired_threshold": expired_threshold, "info_threshold": info_threshold},
)
# get list of alerts to be newly expired
select = """
SELECT id, event, last_receive_id
FROM alerts
WHERE status NOT IN ('expired','shelved') AND timeout!=0
AND (last_receive_time + INTERVAL '1 second' * timeout) < (NOW() at time zone 'utc')
"""
expired = self._fetchall(select, {})
# get list of alerts to be unshelved
select = """
WITH shelved AS (
SELECT DISTINCT ON (a.id) a.id, a.event, a.last_receive_id, h.update_time, a.timeout
FROM alerts a, UNNEST(history) h
WHERE a.status='shelved'
AND h.type='action'
AND h.status='shelved'
ORDER BY a.id, h.update_time DESC
)
SELECT id, event, last_receive_id
FROM shelved
WHERE (update_time + INTERVAL '1 second' * timeout) < (NOW() at time zone 'utc')
"""
unshelved = self._fetchall(select, {})
return (expired, unshelved)
|
def housekeeping(self, expired_threshold, info_threshold):
# delete 'closed' or 'expired' alerts older than "expired_threshold" hours
# and 'informational' alerts older than "info_threshold" hours
delete = """
DELETE FROM alerts
WHERE (status IN ('closed', 'expired')
AND last_receive_time < (NOW() at time zone 'utc' - INTERVAL '%(expired_threshold)s hours'))
OR (severity='informational'
AND last_receive_time < (NOW() at time zone 'utc' - INTERVAL '%(info_threshold)s hours'))
"""
self._delete(
delete,
{"expired_threshold": expired_threshold, "info_threshold": info_threshold},
)
# get list of alerts to be newly expired
update = """
SELECT id, event, last_receive_id
FROM alerts
WHERE status NOT IN ('expired','shelved') AND timeout!=0
AND (last_receive_time + INTERVAL '1 second' * timeout) < (NOW() at time zone 'utc')
"""
expired = self._fetchall(update, {})
# get list of alerts to be unshelved
update = """
SELECT id, event, last_receive_id
FROM alerts
WHERE status='shelved'
AND (last_receive_time + INTERVAL '1 second' * timeout) < (NOW() at time zone 'utc')
"""
unshelved = self._fetchall(update, {})
return (expired, unshelved)
|
https://github.com/alerta/alerta/issues/528
|
2018-04-28 00:06:43,862 - alerta[18702]: ERROR - HOUSEKEEPING FAILED: Type names and field names can only contain alphanumeric characters and underscores: '?column?' [in /usr/lib/python2.7/site-packages/alerta_server-5.2.0_-py2.7.egg/alerta/exceptions.py:67]
Traceback (most recent call last):
File "/usr/lib/python2.7/site-packages/flask/app.py", line 1612, in full_dispatch_request
rv = self.dispatch_request()
File "/usr/lib/python2.7/site-packages/flask/app.py", line 1598, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/usr/lib/python2.7/site-packages/flask_cors/decorator.py", line 128, in wrapped_function
resp = make_response(f(*args, **kwargs))
File "/usr/lib/python2.7/site-packages/alerta_server-5.2.0_-py2.7.egg/alerta/auth/utils.py", line 95, in wrapped
return f(*args, **kwargs)
File "/usr/lib/python2.7/site-packages/alerta_server-5.2.0_-py2.7.egg/alerta/management/views.py", line 148, in housekeeping
raise ApiError('HOUSEKEEPING FAILED: %s' % e, 503)
ApiError: HOUSEKEEPING FAILED: Type names and field names can only contain alphanumeric characters and underscores: '?column?'
|
ApiError
|
def housekeeping(self, expired_threshold, info_threshold):
# delete 'closed' or 'expired' alerts older than "expired_threshold" hours
# and 'informational' alerts older than "info_threshold" hours
expired_hours_ago = datetime.utcnow() - timedelta(hours=expired_threshold)
g.db.alerts.remove(
{
"status": {"$in": ["closed", "expired"]},
"lastReceiveTime": {"$lt": expired_hours_ago},
}
)
info_hours_ago = datetime.utcnow() - timedelta(hours=info_threshold)
g.db.alerts.remove(
{"severity": "informational", "lastReceiveTime": {"$lt": info_hours_ago}}
)
# get list of alerts to be newly expired
pipeline = [
{
"$project": {
"event": 1,
"status": 1,
"lastReceiveId": 1,
"timeout": 1,
"expireTime": {
"$add": ["$lastReceiveTime", {"$multiply": ["$timeout", 1000]}]
},
}
},
{
"$match": {
"status": {"$nin": ["expired", "shelved"]},
"expireTime": {"$lt": datetime.utcnow()},
"timeout": {"$ne": 0},
}
},
]
expired = [
(r["_id"], r["event"], r["lastReceiveId"])
for r in g.db.alerts.aggregate(pipeline)
]
# get list of alerts to be unshelved
pipeline = [
{
"$project": {
"event": 1,
"status": 1,
"lastReceiveId": 1,
"timeout": 1,
"expireTime": {
"$add": ["$lastReceiveTime", {"$multiply": ["$timeout", 1000]}]
},
}
},
{
"$match": {
"status": "shelved",
"expireTime": {"$lt": datetime.utcnow()},
"timeout": {"$ne": 0},
}
},
]
unshelved = [
(r["_id"], r["event"], r["lastReceiveId"])
for r in g.db.alerts.aggregate(pipeline)
]
return (expired, unshelved)
|
def housekeeping(self, expired_threshold, info_threshold):
# delete 'closed' or 'expired' alerts older than "expired_threshold" hours
# and 'informational' alerts older than "info_threshold" hours
expired_hours_ago = datetime.utcnow() - timedelta(hours=expired_threshold)
g.db.alerts.remove(
{
"status": {"$in": ["closed", "expired"]},
"lastReceiveTime": {"$lt": expired_hours_ago},
}
)
info_hours_ago = datetime.utcnow() - timedelta(hours=info_threshold)
g.db.alerts.remove(
{"severity": "informational", "lastReceiveTime": {"$lt": info_hours_ago}}
)
# get list of alerts to be newly expired
pipeline = [
{
"$project": {
"event": 1,
"status": 1,
"lastReceiveId": 1,
"timeout": 1,
"expireTime": {
"$add": ["$lastReceiveTime", {"$multiply": ["$timeout", 1000]}]
},
}
},
{
"$match": {
"status": {"$nin": ["expired", "shelved"]},
"expireTime": {"$lt": datetime.utcnow()},
"timeout": {"$ne": 0},
}
},
]
expired = [
(r["_id"], r["event"], "expired", r["lastReceiveId"])
for r in g.db.alerts.aggregate(pipeline)
]
# get list of alerts to be unshelved
pipeline = [
{
"$project": {
"event": 1,
"status": 1,
"lastReceiveId": 1,
"timeout": 1,
"expireTime": {
"$add": ["$lastReceiveTime", {"$multiply": ["$timeout", 1000]}]
},
}
},
{
"$match": {
"status": "shelved",
"expireTime": {"$lt": datetime.utcnow()},
"timeout": {"$ne": 0},
}
},
]
unshelved = [
(r["_id"], r["event"], "open", r["lastReceiveId"])
for r in g.db.alerts.aggregate(pipeline)
]
return expired + unshelved
|
https://github.com/alerta/alerta/issues/528
|
2018-04-28 00:06:43,862 - alerta[18702]: ERROR - HOUSEKEEPING FAILED: Type names and field names can only contain alphanumeric characters and underscores: '?column?' [in /usr/lib/python2.7/site-packages/alerta_server-5.2.0_-py2.7.egg/alerta/exceptions.py:67]
Traceback (most recent call last):
File "/usr/lib/python2.7/site-packages/flask/app.py", line 1612, in full_dispatch_request
rv = self.dispatch_request()
File "/usr/lib/python2.7/site-packages/flask/app.py", line 1598, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/usr/lib/python2.7/site-packages/flask_cors/decorator.py", line 128, in wrapped_function
resp = make_response(f(*args, **kwargs))
File "/usr/lib/python2.7/site-packages/alerta_server-5.2.0_-py2.7.egg/alerta/auth/utils.py", line 95, in wrapped
return f(*args, **kwargs)
File "/usr/lib/python2.7/site-packages/alerta_server-5.2.0_-py2.7.egg/alerta/management/views.py", line 148, in housekeeping
raise ApiError('HOUSEKEEPING FAILED: %s' % e, 503)
ApiError: HOUSEKEEPING FAILED: Type names and field names can only contain alphanumeric characters and underscores: '?column?'
|
ApiError
|
def housekeeping(self, expired_threshold, info_threshold):
# delete 'closed' or 'expired' alerts older than "expired_threshold" hours
# and 'informational' alerts older than "info_threshold" hours
delete = """
DELETE FROM alerts
WHERE (status IN ('closed', 'expired')
AND last_receive_time < (NOW() at time zone 'utc' - INTERVAL '%(expired_threshold)s hours'))
OR (severity='informational'
AND last_receive_time < (NOW() at time zone 'utc' - INTERVAL '%(info_threshold)s hours'))
"""
self._delete(
delete,
{"expired_threshold": expired_threshold, "info_threshold": info_threshold},
)
# get list of alerts to be newly expired
update = """
SELECT id, event, last_receive_id
FROM alerts
WHERE status NOT IN ('expired','shelved') AND timeout!=0
AND (last_receive_time + INTERVAL '1 second' * timeout) < (NOW() at time zone 'utc')
"""
expired = self._fetchall(update, {})
# get list of alerts to be unshelved
update = """
SELECT id, event, last_receive_id
FROM alerts
WHERE status='shelved'
AND (last_receive_time + INTERVAL '1 second' * timeout) < (NOW() at time zone 'utc')
"""
unshelved = self._fetchall(update, {})
return (expired, unshelved)
|
def housekeeping(self, expired_threshold, info_threshold):
# delete 'closed' or 'expired' alerts older than "expired_threshold" hours
# and 'informational' alerts older than "info_threshold" hours
delete = """
DELETE FROM alerts
WHERE (status IN ('closed', 'expired')
AND last_receive_time < (NOW() at time zone 'utc' - INTERVAL '%(expired_threshold)s hours'))
OR (severity='informational'
AND last_receive_time < (NOW() at time zone 'utc' - INTERVAL '%(info_threshold)s hours'))
"""
self._delete(
delete,
{"expired_threshold": expired_threshold, "info_threshold": info_threshold},
)
# get list of alerts to be newly expired
update = """
SELECT id, event, 'expired', last_receive_id
FROM alerts
WHERE status NOT IN ('expired','shelved') AND timeout!=0
AND (last_receive_time + INTERVAL '1 second' * timeout) < (NOW() at time zone 'utc')
"""
expired = self._fetchall(update, {})
# get list of alerts to be unshelved
update = """
SELECT id, event, 'open', last_receive_id
FROM alerts
WHERE status='shelved'
AND (last_receive_time + INTERVAL '1 second' * timeout) < (NOW() at time zone 'utc')
"""
unshelved = self._fetchall(update, {})
return expired + unshelved
|
https://github.com/alerta/alerta/issues/528
|
2018-04-28 00:06:43,862 - alerta[18702]: ERROR - HOUSEKEEPING FAILED: Type names and field names can only contain alphanumeric characters and underscores: '?column?' [in /usr/lib/python2.7/site-packages/alerta_server-5.2.0_-py2.7.egg/alerta/exceptions.py:67]
Traceback (most recent call last):
File "/usr/lib/python2.7/site-packages/flask/app.py", line 1612, in full_dispatch_request
rv = self.dispatch_request()
File "/usr/lib/python2.7/site-packages/flask/app.py", line 1598, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/usr/lib/python2.7/site-packages/flask_cors/decorator.py", line 128, in wrapped_function
resp = make_response(f(*args, **kwargs))
File "/usr/lib/python2.7/site-packages/alerta_server-5.2.0_-py2.7.egg/alerta/auth/utils.py", line 95, in wrapped
return f(*args, **kwargs)
File "/usr/lib/python2.7/site-packages/alerta_server-5.2.0_-py2.7.egg/alerta/management/views.py", line 148, in housekeeping
raise ApiError('HOUSEKEEPING FAILED: %s' % e, 503)
ApiError: HOUSEKEEPING FAILED: Type names and field names can only contain alphanumeric characters and underscores: '?column?'
|
ApiError
|
def housekeeping(expired_threshold=2, info_threshold=12):
expired, unshelved = db.housekeeping(expired_threshold, info_threshold)
for id, event, last_receive_id in expired:
history = History(
id=last_receive_id,
event=event,
status="expired",
text="expired after timeout",
change_type="status",
update_time=datetime.utcnow(),
)
db.set_status(
id, "expired", timeout=current_app.config["ALERT_TIMEOUT"], history=history
)
for id, event, last_receive_id in unshelved:
history = History(
id=last_receive_id,
event=event,
status="open",
text="unshelved after timeout",
change_type="status",
update_time=datetime.utcnow(),
)
db.set_status(
id, "open", timeout=current_app.config["ALERT_TIMEOUT"], history=history
)
|
def housekeeping(expired_threshold=2, info_threshold=12):
for id, event, status, last_receive_id in db.housekeeping(
expired_threshold, info_threshold
):
if status == "open":
text = "unshelved after timeout"
elif status == "expired":
text = "expired after timeout"
else:
text = "alert timeout status change"
history = History(
id=last_receive_id,
event=event,
status=status,
text=text,
change_type="status",
update_time=datetime.utcnow(),
)
db.set_status(
id, status, timeout=current_app.config["ALERT_TIMEOUT"], history=history
)
|
https://github.com/alerta/alerta/issues/528
|
2018-04-28 00:06:43,862 - alerta[18702]: ERROR - HOUSEKEEPING FAILED: Type names and field names can only contain alphanumeric characters and underscores: '?column?' [in /usr/lib/python2.7/site-packages/alerta_server-5.2.0_-py2.7.egg/alerta/exceptions.py:67]
Traceback (most recent call last):
File "/usr/lib/python2.7/site-packages/flask/app.py", line 1612, in full_dispatch_request
rv = self.dispatch_request()
File "/usr/lib/python2.7/site-packages/flask/app.py", line 1598, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/usr/lib/python2.7/site-packages/flask_cors/decorator.py", line 128, in wrapped_function
resp = make_response(f(*args, **kwargs))
File "/usr/lib/python2.7/site-packages/alerta_server-5.2.0_-py2.7.egg/alerta/auth/utils.py", line 95, in wrapped
return f(*args, **kwargs)
File "/usr/lib/python2.7/site-packages/alerta_server-5.2.0_-py2.7.egg/alerta/management/views.py", line 148, in housekeeping
raise ApiError('HOUSEKEEPING FAILED: %s' % e, 503)
ApiError: HOUSEKEEPING FAILED: Type names and field names can only contain alphanumeric characters and underscores: '?column?'
|
ApiError
|
def is_flapping(self, alert, window=1800, count=2):
"""
Return true if alert severity has changed more than X times in Y seconds
"""
pipeline = [
{
"$match": {
"environment": alert.environment,
"resource": alert.resource,
"event": alert.event,
"customer": alert.customer,
}
},
{"$unwind": "$history"},
{
"$match": {
"history.updateTime": {
"$gt": datetime.utcnow() - timedelta(seconds=window)
},
"history.type": "severity",
}
},
{"$group": {"_id": "$history.type", "count": {"$sum": 1}}},
]
responses = g.db.alerts.aggregate(pipeline)
for r in responses:
if r["count"] > count:
return True
return False
|
def is_flapping(self, alert, window=1800, count=2):
"""
Return true if alert severity has changed more than X times in Y seconds
"""
pipeline = [
{
"$match": {
"environment": alert.environment,
"resource": alert.resource,
"event": alert.event,
}
},
{"$unwind": "$history"},
{
"$match": {
"history.updateTime": {
"$gt": datetime.utcnow() - timedelta(seconds=window)
}
},
"history.type": "severity",
},
{"$group": {"_id": "$history.type", "count": {"$sum": 1}}},
]
responses = g.db.alerts.aggregate(pipeline)
for r in responses:
if r["count"] > count:
return True
return False
|
https://github.com/alerta/alerta/issues/456
|
2018-01-26 11:04:16,782 - alerta.plugins.flapping[9]: INFO - recieved exception [in /usr/local/lib/python3.6/site-packages/alerta_flapping-1.0.1-py3.6.egg/alerta_flapping.py:28]
2018-01-26 11:04:16,782 - alerta.plugins.flapping[9]: ERROR - A pipeline stage specification object must contain exactly one field. [in /usr/local/lib/python3.6/site-packages/alerta_flapping-1.0.1-py3.6.egg/alerta_flapping.py:29]
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/alerta_flapping-1.0.1-py3.6.egg/alerta_flapping.py", line 14, in pre_receive
result = alert.is_flapping(count=FLAPPING_COUNT, window=FLAPPING_WINDOW)
File "/usr/local/lib/python3.6/site-packages/alerta/models/alert.py", line 223, in is_flapping
return db.is_flapping(self, window, count)
File "/usr/local/lib/python3.6/site-packages/alerta/database/backends/mongodb/base.py", line 144, in is_flapping
responses = g.db.alerts.aggregate(pipeline)
File "/usr/local/lib/python3.6/site-packages/pymongo/collection.py", line 2181, in aggregate
**kwargs)
File "/usr/local/lib/python3.6/site-packages/pymongo/collection.py", line 2088, in _aggregate
client=self.__database.client)
File "/usr/local/lib/python3.6/site-packages/pymongo/pool.py", line 496, in command
collation=collation)
File "/usr/local/lib/python3.6/site-packages/pymongo/network.py", line 125, in command
parse_write_concern_error=parse_write_concern_error)
File "/usr/local/lib/python3.6/site-packages/pymongo/helpers.py", line 146, in _check_command_response
raise OperationFailure(msg % errmsg, code, response)
pymongo.errors.OperationFailure: A pipeline stage specification object must contain exactly one field.
|
pymongo.errors.OperationFailure
|
def main():
app.logger.info("Starting alerta version %s ...", __version__)
app.logger.info("Using MongoDB version %s ...", db.get_version())
app.run(host="0.0.0.0", port=8080, threaded=True)
|
def main():
app.run(host="0.0.0.0", port=8080, threaded=True)
|
https://github.com/alerta/alerta/issues/99
|
[Sat Jun 20 12:53:00.260834 2015] [:error] [pid 19124:tid 140441230903040] 2015-06-20 12:53:00,260 - alerta.app[19124]: INFO - Starting alerta version 4.4.7 ...
[Sat Jun 20 12:53:00.322813 2015] [:error] [pid 19124:tid 140441230903040] 2015-06-20 12:53:00,322 - alerta.app[19124]: DEBUG - Connected to mongodb://localhost:27017/monitoring
[Sat Jun 20 12:53:30.368402 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] mod_wsgi (pid=19124): Target WSGI script '/var/www/api.wsgi' cannot be loaded as Python module.
[Sat Jun 20 12:53:30.369482 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] mod_wsgi (pid=19124): Exception occurred processing WSGI script '/var/www/api.wsgi'.
[Sat Jun 20 12:53:30.369865 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] Traceback (most recent call last):
[Sat Jun 20 12:53:30.370266 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] File "/var/www/api.wsgi", line 4, in <module>
[Sat Jun 20 12:53:30.370691 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] from alerta.app import app as application
[Sat Jun 20 12:53:30.370976 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] File "/opt/alerta/lib/python2.7/site-packages/alerta/app/__init__.py", line 49, in <module>
[Sat Jun 20 12:53:30.371371 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] db = Mongo()
[Sat Jun 20 12:53:30.371839 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] File "/opt/alerta/lib/python2.7/site-packages/alerta/app/database.py", line 81, in __init__
[Sat Jun 20 12:53:30.372459 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] self.version = self.db.collection_names()
[Sat Jun 20 12:53:30.372813 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] File "/opt/alerta/lib/python2.7/site-packages/pymongo/database.py", line 488, in collection_names
[Sat Jun 20 12:53:30.373313 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] ReadPreference.PRIMARY) as (sock_info, slave_okay):
[Sat Jun 20 12:53:30.373520 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] File "/usr/lib/python2.7/contextlib.py", line 17, in __enter__
[Sat Jun 20 12:53:30.373826 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] return self.gen.next()
[Sat Jun 20 12:53:30.374027 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] File "/opt/alerta/lib/python2.7/site-packages/pymongo/mongo_client.py", line 699, in _socket_for_reads
[Sat Jun 20 12:53:30.374501 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] with self._get_socket(read_preference) as sock_info:
[Sat Jun 20 12:53:30.374706 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] File "/usr/lib/python2.7/contextlib.py", line 17, in __enter__
[Sat Jun 20 12:53:30.374978 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] return self.gen.next()
[Sat Jun 20 12:53:30.375184 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] File "/opt/alerta/lib/python2.7/site-packages/pymongo/mongo_client.py", line 663, in _get_socket
[Sat Jun 20 12:53:30.375440 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] server = self._get_topology().select_server(selector)
[Sat Jun 20 12:53:30.375661 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] File "/opt/alerta/lib/python2.7/site-packages/pymongo/topology.py", line 121, in select_server
[Sat Jun 20 12:53:30.376016 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] address))
[Sat Jun 20 12:53:30.376039 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] File "/opt/alerta/lib/python2.7/site-packages/pymongo/topology.py", line 97, in select_servers
[Sat Jun 20 12:53:30.376062 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] self._error_message(selector))
[Sat Jun 20 12:53:30.376095 2015] [:error] [pid 19124:tid 140441230903040] [remote 192.168.0.1:8204] ServerSelectionTimeoutError: No servers found yet
|
ServerSelectionTimeoutError
|
def set(self, value):
db.metrics.update_one(
{"group": self.group, "name": self.name},
{
"$set": {
"group": self.group,
"name": self.name,
"title": self.title,
"description": self.description,
"value": value,
"type": "gauge",
}
},
upsert=True,
)
|
def set(self, value):
db.metrics.update_one(
{"group": self.group, "name": self.name},
{
"group": self.group,
"name": self.name,
"title": self.title,
"description": self.description,
"value": value,
"type": "gauge",
},
True,
)
|
https://github.com/alerta/alerta/issues/96
|
5.172.237.230 - - [13/Jun/2015 19:50:25] "GET /management/status HTTP/1.1" 500 -
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1836, in __call__
return self.wsgi_app(environ, start_response)
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1820, in wsgi_app
response = self.make_response(self.handle_exception(e))
File "/usr/local/lib/python2.7/dist-packages/flask_cors/extension.py", line 110, in wrapped_function
return cors_after_request(app.make_response(f(*args, **kwargs)))
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1403, in handle_exception
reraise(exc_type, exc_value, tb)
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1817, in wsgi_app
response = self.full_dispatch_request()
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1477, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/usr/local/lib/python2.7/dist-packages/flask_cors/extension.py", line 110, in wrapped_function
return cors_after_request(app.make_response(f(*args, **kwargs)))
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1381, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1475, in full_dispatch_request
rv = self.dispatch_request()
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1461, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/usr/local/lib/python2.7/dist-packages/flask_cors/decorator.py", line 120, in wrapped_function
resp = make_response(f(*args, **kwargs))
File "/usr/local/lib/python2.7/dist-packages/alerta_server-4.4.4-py2.7.egg/alerta/app/management/views.py", line 121, in status
total_alert_gauge.set(db.get_count())
File "/usr/local/lib/python2.7/dist-packages/alerta_server-4.4.4-py2.7.egg/alerta/app/metrics.py", line 38, in set
True
File "/usr/local/lib/python2.7/dist-packages/pymongo/collection.py", line 635, in update_one
common.validate_ok_for_update(update)
File "/usr/local/lib/python2.7/dist-packages/pymongo/common.py", line 377, in validate_ok_for_update
raise ValueError('update only works with $ operators')
ValueError: update only works with $ operators
|
ValueError
|
def inc(self):
db.metrics.update_one(
{"group": self.group, "name": self.name},
{
"$set": {
"group": self.group,
"name": self.name,
"title": self.title,
"description": self.description,
"type": "counter",
},
"$inc": {"count": 1},
},
upsert=True,
)
|
def inc(self):
db.metrics.update_one(
{"group": self.group, "name": self.name},
{
"$set": {
"group": self.group,
"name": self.name,
"title": self.title,
"description": self.description,
"type": "counter",
},
"$inc": {"count": 1},
},
True,
)
|
https://github.com/alerta/alerta/issues/96
|
5.172.237.230 - - [13/Jun/2015 19:50:25] "GET /management/status HTTP/1.1" 500 -
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1836, in __call__
return self.wsgi_app(environ, start_response)
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1820, in wsgi_app
response = self.make_response(self.handle_exception(e))
File "/usr/local/lib/python2.7/dist-packages/flask_cors/extension.py", line 110, in wrapped_function
return cors_after_request(app.make_response(f(*args, **kwargs)))
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1403, in handle_exception
reraise(exc_type, exc_value, tb)
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1817, in wsgi_app
response = self.full_dispatch_request()
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1477, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/usr/local/lib/python2.7/dist-packages/flask_cors/extension.py", line 110, in wrapped_function
return cors_after_request(app.make_response(f(*args, **kwargs)))
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1381, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1475, in full_dispatch_request
rv = self.dispatch_request()
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1461, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/usr/local/lib/python2.7/dist-packages/flask_cors/decorator.py", line 120, in wrapped_function
resp = make_response(f(*args, **kwargs))
File "/usr/local/lib/python2.7/dist-packages/alerta_server-4.4.4-py2.7.egg/alerta/app/management/views.py", line 121, in status
total_alert_gauge.set(db.get_count())
File "/usr/local/lib/python2.7/dist-packages/alerta_server-4.4.4-py2.7.egg/alerta/app/metrics.py", line 38, in set
True
File "/usr/local/lib/python2.7/dist-packages/pymongo/collection.py", line 635, in update_one
common.validate_ok_for_update(update)
File "/usr/local/lib/python2.7/dist-packages/pymongo/common.py", line 377, in validate_ok_for_update
raise ValueError('update only works with $ operators')
ValueError: update only works with $ operators
|
ValueError
|
def stop_timer(self, start):
db.metrics.update_one(
{"group": self.group, "name": self.name},
{
"$set": {
"group": self.group,
"name": self.name,
"title": self.title,
"description": self.description,
"type": "timer",
},
"$inc": {"count": 1, "totalTime": self._time_in_millis() - start},
},
upsert=True,
)
|
def stop_timer(self, start):
db.metrics.update_one(
{"group": self.group, "name": self.name},
{
"$set": {
"group": self.group,
"name": self.name,
"title": self.title,
"description": self.description,
"type": "timer",
},
"$inc": {"count": 1, "totalTime": self._time_in_millis() - start},
},
True,
)
|
https://github.com/alerta/alerta/issues/96
|
5.172.237.230 - - [13/Jun/2015 19:50:25] "GET /management/status HTTP/1.1" 500 -
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1836, in __call__
return self.wsgi_app(environ, start_response)
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1820, in wsgi_app
response = self.make_response(self.handle_exception(e))
File "/usr/local/lib/python2.7/dist-packages/flask_cors/extension.py", line 110, in wrapped_function
return cors_after_request(app.make_response(f(*args, **kwargs)))
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1403, in handle_exception
reraise(exc_type, exc_value, tb)
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1817, in wsgi_app
response = self.full_dispatch_request()
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1477, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/usr/local/lib/python2.7/dist-packages/flask_cors/extension.py", line 110, in wrapped_function
return cors_after_request(app.make_response(f(*args, **kwargs)))
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1381, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1475, in full_dispatch_request
rv = self.dispatch_request()
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1461, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/usr/local/lib/python2.7/dist-packages/flask_cors/decorator.py", line 120, in wrapped_function
resp = make_response(f(*args, **kwargs))
File "/usr/local/lib/python2.7/dist-packages/alerta_server-4.4.4-py2.7.egg/alerta/app/management/views.py", line 121, in status
total_alert_gauge.set(db.get_count())
File "/usr/local/lib/python2.7/dist-packages/alerta_server-4.4.4-py2.7.egg/alerta/app/metrics.py", line 38, in set
True
File "/usr/local/lib/python2.7/dist-packages/pymongo/collection.py", line 635, in update_one
common.validate_ok_for_update(update)
File "/usr/local/lib/python2.7/dist-packages/pymongo/common.py", line 377, in validate_ok_for_update
raise ValueError('update only works with $ operators')
ValueError: update only works with $ operators
|
ValueError
|
def refine_from_db(path, video):
if isinstance(video, Episode):
db = sqlite3.connect(
os.path.join(args.config_dir, "db", "bazarr.db"), timeout=30
)
c = db.cursor()
data = c.execute(
"SELECT table_shows.title, table_episodes.season, table_episodes.episode, table_episodes.title, table_shows.year, table_shows.tvdbId, table_shows.alternateTitles, table_episodes.format, table_episodes.resolution, table_episodes.video_codec, table_episodes.audio_codec FROM table_episodes INNER JOIN table_shows on table_shows.sonarrSeriesId = table_episodes.sonarrSeriesId WHERE table_episodes.path = ?",
(unicode(path_replace_reverse(path)),),
).fetchone()
db.close()
if data:
video.series = re.sub(r"(\(\d\d\d\d\))", "", data[0])
video.season = int(data[1])
video.episode = int(data[2])
video.title = data[3]
if data[4]:
if int(data[4]) > 0:
video.year = int(data[4])
video.series_tvdb_id = int(data[5])
video.alternative_series = ast.literal_eval(data[6])
if not video.format:
video.format = str(data[7])
if not video.resolution:
video.resolution = str(data[8])
if not video.video_codec:
if data[9]:
video.video_codec = data[9]
if not video.audio_codec:
if data[10]:
video.audio_codec = data[10]
elif isinstance(video, Movie):
db = sqlite3.connect(
os.path.join(args.config_dir, "db", "bazarr.db"), timeout=30
)
c = db.cursor()
data = c.execute(
"SELECT title, year, alternativeTitles, format, resolution, video_codec, audio_codec, imdbId FROM table_movies WHERE path = ?",
(unicode(path_replace_reverse_movie(path)),),
).fetchone()
db.close()
if data:
video.title = re.sub(r"(\(\d\d\d\d\))", "", data[0])
if data[1]:
if int(data[1]) > 0:
video.year = int(data[1])
if data[7]:
video.imdb_id = data[7]
video.alternative_titles = ast.literal_eval(data[2])
if not video.format:
if data[3]:
video.format = data[3]
if not video.resolution:
if data[4]:
video.resolution = data[4]
if not video.video_codec:
if data[5]:
video.video_codec = data[5]
if not video.audio_codec:
if data[6]:
video.audio_codec = data[6]
return video
|
def refine_from_db(path, video):
if isinstance(video, Episode):
db = sqlite3.connect(
os.path.join(args.config_dir, "db", "bazarr.db"), timeout=30
)
c = db.cursor()
data = c.execute(
"SELECT table_shows.title, table_episodes.season, table_episodes.episode, table_episodes.title, table_shows.year, table_shows.tvdbId, table_shows.alternateTitles, table_episodes.format, table_episodes.resolution, table_episodes.video_codec, table_episodes.audio_codec FROM table_episodes INNER JOIN table_shows on table_shows.sonarrSeriesId = table_episodes.sonarrSeriesId WHERE table_episodes.path = ?",
(path_replace_reverse(path),),
).fetchone()
db.close()
if data:
video.series = re.sub(r"(\(\d\d\d\d\))", "", data[0])
video.season = int(data[1])
video.episode = int(data[2])
video.title = data[3]
if int(data[4]) > 0:
video.year = int(data[4])
video.series_tvdb_id = int(data[5])
video.alternative_series = ast.literal_eval(data[6])
if not video.format:
video.format = str(data[7])
if not video.resolution:
video.resolution = str(data[8])
if not video.video_codec:
if data[9]:
video.video_codec = data[9]
if not video.audio_codec:
if data[10]:
video.audio_codec = data[10]
elif isinstance(video, Movie):
db = sqlite3.connect(
os.path.join(args.config_dir, "db", "bazarr.db"), timeout=30
)
c = db.cursor()
data = c.execute(
"SELECT title, year, alternativeTitles, format, resolution, video_codec, audio_codec, imdbId FROM table_movies WHERE path = ?",
(path_replace_reverse_movie(path),),
).fetchone()
db.close()
if data:
video.title = re.sub(r"(\(\d\d\d\d\))", "", data[0])
if int(data[1]) > 0:
video.year = int(data[1])
if data[7]:
video.imdb_id = data[7]
video.alternative_titles = ast.literal_eval(data[2])
if not video.format:
if data[3]:
video.format = data[3]
if not video.resolution:
if data[4]:
video.resolution = data[4]
if not video.video_codec:
if data[5]:
video.video_codec = data[5]
if not video.audio_codec:
if data[6]:
video.audio_codec = data[6]
return video
|
https://github.com/morpheus65535/bazarr/issues/346
|
2/03/2019 20:39:25|ERROR |root |BAZARR Error trying to get video information for this file: /volume1/remote/media/TV/Nice Serie/Season 01/the.serie.s01e10.1080p.web.x264-tbs.mkv|Traceback (most recent call last): File "/volume1/@appstore/bazarr/bazarr/get_subtitle.py", line 69, in get_video refine_from_db(original_path,video) File "/volume1/@appstore/bazarr/bazarr/get_subtitle.py", line 670, in refine_from_db if int(data[4]) > 0: video.year = int(data[4])TypeError: int() argument must be a string or a number, not 'NoneType'|
|
TypeError
|
def sync_episodes():
logging.debug("Starting episode sync from Sonarr.")
from get_settings import get_sonarr_settings
url_sonarr = get_sonarr_settings()[6]
apikey_sonarr = get_sonarr_settings()[4]
# Open database connection
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
# Get current episodes id in DB
current_episodes_db = c.execute(
"SELECT sonarrEpisodeId FROM table_episodes"
).fetchall()
current_episodes_db_list = [x[0] for x in current_episodes_db]
current_episodes_sonarr = []
episodes_to_update = []
episodes_to_add = []
# Get sonarrId for each series from database
seriesIdList = c.execute("SELECT sonarrSeriesId FROM table_shows").fetchall()
# Close database connection
c.close()
for seriesId in seriesIdList:
# Get episodes data for a series from Sonarr
url_sonarr_api_episode = (
url_sonarr
+ "/api/episode?seriesId="
+ str(seriesId[0])
+ "&apikey="
+ apikey_sonarr
)
try:
r = requests.get(url_sonarr_api_episode, timeout=15, verify=False)
r.raise_for_status()
except requests.exceptions.HTTPError as errh:
logging.exception("Error trying to get episodes from Sonarr. Http error.")
except requests.exceptions.ConnectionError as errc:
logging.exception(
"Error trying to get episodes from Sonarr. Connection Error."
)
except requests.exceptions.Timeout as errt:
logging.exception(
"Error trying to get episodes from Sonarr. Timeout Error."
)
except requests.exceptions.RequestException as err:
logging.exception("Error trying to get episodes from Sonarr.")
else:
for episode in r.json():
if "hasFile" in episode:
if episode["hasFile"] is True:
if "episodeFile" in episode:
if episode["episodeFile"]["size"] > 20480:
# Add shows in Sonarr to current shows list
if "sceneName" in episode["episodeFile"]:
sceneName = episode["episodeFile"]["sceneName"]
else:
sceneName = None
# Add episodes in sonarr to current episode list
current_episodes_sonarr.append(episode["id"])
if episode["id"] in current_episodes_db_list:
episodes_to_update.append(
(
episode["title"],
episode["episodeFile"]["path"],
episode["seasonNumber"],
episode["episodeNumber"],
sceneName,
str(bool(episode["monitored"])),
episode["id"],
)
)
else:
episodes_to_add.append(
(
episode["seriesId"],
episode["id"],
episode["title"],
episode["episodeFile"]["path"],
episode["seasonNumber"],
episode["episodeNumber"],
sceneName,
str(bool(episode["monitored"])),
)
)
removed_episodes = list(
set(current_episodes_db_list) - set(current_episodes_sonarr)
)
# Update or insert movies in DB
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
updated_result = c.executemany(
"""UPDATE table_episodes SET title = ?, path = ?, season = ?, episode = ?, scene_name = ?, monitored = ? WHERE sonarrEpisodeId = ?""",
episodes_to_update,
)
db.commit()
added_result = c.executemany(
"""INSERT OR IGNORE INTO table_episodes(sonarrSeriesId, sonarrEpisodeId, title, path, season, episode, scene_name, monitored) VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
episodes_to_add,
)
db.commit()
for removed_episode in removed_episodes:
c.execute(
"DELETE FROM table_episodes WHERE sonarrEpisodeId = ?", (removed_episode,)
)
db.commit()
# Close database connection
c.close()
for added_episode in episodes_to_add:
store_subtitles(path_replace(added_episode[3]))
logging.debug("All episodes synced from Sonarr into database.")
list_missing_subtitles()
logging.debug("All missing subtitles updated in database.")
|
def sync_episodes():
logging.debug("Starting episode sync from Sonarr.")
from get_settings import get_sonarr_settings
url_sonarr = get_sonarr_settings()[6]
apikey_sonarr = get_sonarr_settings()[4]
# Open database connection
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
# Get current episodes id in DB
current_episodes_db = c.execute(
"SELECT sonarrEpisodeId FROM table_episodes"
).fetchall()
current_episodes_db_list = [x[0] for x in current_episodes_db]
current_episodes_sonarr = []
episodes_to_update = []
episodes_to_add = []
# Get sonarrId for each series from database
seriesIdList = c.execute("SELECT sonarrSeriesId FROM table_shows").fetchall()
# Close database connection
c.close()
for seriesId in seriesIdList:
# Get episodes data for a series from Sonarr
url_sonarr_api_episode = (
url_sonarr
+ "/api/episode?seriesId="
+ str(seriesId[0])
+ "&apikey="
+ apikey_sonarr
)
try:
r = requests.get(url_sonarr_api_episode, timeout=15, verify=False)
r.raise_for_status()
except requests.exceptions.HTTPError as errh:
logging.exception("Error trying to get episodes from Sonarr. Http error.")
except requests.exceptions.ConnectionError as errc:
logging.exception(
"Error trying to get episodes from Sonarr. Connection Error."
)
except requests.exceptions.Timeout as errt:
logging.exception(
"Error trying to get episodes from Sonarr. Timeout Error."
)
except requests.exceptions.RequestException as err:
logging.exception("Error trying to get episodes from Sonarr.")
else:
for episode in r.json():
if "hasFile" in episode:
if episode["hasFile"] is True:
if "episodeFile" in episode:
if episode["episodeFile"]["size"] > 20480:
# Add shows in Sonarr to current shows list
if "sceneName" in episode["episodeFile"]:
sceneName = episode["episodeFile"]["sceneName"]
else:
sceneName = None
# Add episodes in sonarr to current episode list
current_episodes_sonarr.append(episode["id"])
if episode["id"] in current_episodes_db_list:
episodes_to_update.append(
(
episode["title"],
episode["episodeFile"]["path"],
episode["seasonNumber"],
episode["episodeNumber"],
sceneName,
str(bool(episode["monitored"])),
episode["id"],
)
)
else:
episodes_to_add.append(
(
episode["seriesId"],
episode["id"],
episode["title"],
episode["episodeFile"]["path"],
episode["seasonNumber"],
episode["episodeNumber"],
sceneName,
str(bool(episode["monitored"])),
)
)
removed_episodes = list(
set(current_episodes_db_list) - set(current_episodes_sonarr)
)
# Update or insert movies in DB
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
updated_result = c.executemany(
"""UPDATE table_episodes SET title = ?, path = ?, season = ?, episode = ?, scene_name = ?, monitored = ? WHERE sonarrEpisodeId = ?""",
episodes_to_update,
)
db.commit()
try:
added_result = c.executemany(
"""INSERT INTO table_episodes(sonarrSeriesId, sonarrEpisodeId, title, path, season, episode, scene_name, monitored) VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
episodes_to_add,
)
except sqlite3.IntegrityError as e:
logging.exception(
"You're probably an early adopter of Bazarr and this is a known issue. Please open an issue on Github and we'll fix this."
)
else:
db.commit()
for removed_episode in removed_episodes:
c.execute(
"DELETE FROM table_episodes WHERE sonarrEpisodeId = ?", (removed_episode,)
)
db.commit()
# Close database connection
c.close()
for added_episode in episodes_to_add:
store_subtitles(path_replace(added_episode[3]))
logging.debug("All episodes synced from Sonarr into database.")
list_missing_subtitles()
logging.debug("All missing subtitles updated in database.")
|
https://github.com/morpheus65535/bazarr/issues/153
|
26/09/2018 04:04:37|ERROR|Job "Update movies list from Radarr (trigger: interval[0:05:00], next run at: 2018-09-26 04:09:33 PDT)" raised an exception|'Traceback (most recent call last):\n File "/app/libs/apscheduler/executors/base.py", line 125, in run_job\n retval = job.func(*job.args, **job.kwargs)\n File "/app/get_movies.py", line 100, in update_movies\n added_result = c.executemany(\'\'\'INSERT INTO table_movies(title, path, tmdbId, languages, subtitles,`hearing_impaired`, radarrId, overview, poster, fanart, `audio_language`, sceneName, monitored) VALUES (?,?,?,(SELECT languages FROM table_movies WHERE tmdbId = ?), \'[]\',(SELECT `hearing_impaired` FROM table_movies WHERE tmdbId = ?), ?, ?, ?, ?, ?, ?, ?)\'\'\', movies_to_add)\nIntegrityError: UNIQUE constraint failed: table_movies.path'|
|
nIntegrityError
|
def update_movies():
logging.debug("Starting movie sync from Radarr.")
from get_settings import get_radarr_settings
url_radarr = get_radarr_settings()[6]
apikey_radarr = get_radarr_settings()[4]
movie_default_enabled = get_general_settings()[18]
movie_default_language = get_general_settings()[19]
movie_default_hi = get_general_settings()[20]
if apikey_radarr == None:
pass
else:
get_profile_list()
# Get movies data from radarr
url_radarr_api_movies = url_radarr + "/api/movie?apikey=" + apikey_radarr
try:
r = requests.get(url_radarr_api_movies, timeout=15, verify=False)
r.raise_for_status()
except requests.exceptions.HTTPError as errh:
logging.exception("Error trying to get movies from Radarr. Http error.")
except requests.exceptions.ConnectionError as errc:
logging.exception(
"Error trying to get movies from Radarr. Connection Error."
)
except requests.exceptions.Timeout as errt:
logging.exception("Error trying to get movies from Radarr. Timeout Error.")
except requests.exceptions.RequestException as err:
logging.exception("Error trying to get movies from Radarr.")
else:
# Get current movies in DB
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
current_movies_db = c.execute("SELECT tmdbId FROM table_movies").fetchall()
db.close()
current_movies_db_list = [x[0] for x in current_movies_db]
current_movies_radarr = []
movies_to_update = []
movies_to_add = []
for movie in r.json():
if movie["hasFile"] is True:
if "movieFile" in movie:
try:
overview = unicode(movie["overview"])
except:
overview = ""
try:
poster_big = movie["images"][0]["url"]
poster = (
os.path.splitext(poster_big)[0]
+ "-500"
+ os.path.splitext(poster_big)[1]
)
except:
poster = ""
try:
fanart = movie["images"][1]["url"]
except:
fanart = ""
if "sceneName" in movie["movieFile"]:
sceneName = movie["movieFile"]["sceneName"]
else:
sceneName = None
# Add movies in radarr to current movies list
current_movies_radarr.append(unicode(movie["tmdbId"]))
# Detect file separator
if movie["path"][0] == "/":
separator = "/"
else:
separator = "\\"
if unicode(movie["tmdbId"]) in current_movies_db_list:
movies_to_update.append(
(
movie["title"],
movie["path"]
+ separator
+ movie["movieFile"]["relativePath"],
movie["tmdbId"],
movie["id"],
overview,
poster,
fanart,
profile_id_to_language(movie["qualityProfileId"]),
sceneName,
unicode(bool(movie["monitored"])),
movie["tmdbId"],
)
)
else:
if movie_default_enabled is True:
movies_to_add.append(
(
movie["title"],
movie["path"]
+ separator
+ movie["movieFile"]["relativePath"],
movie["tmdbId"],
movie_default_language,
"[]",
movie_default_hi,
movie["id"],
overview,
poster,
fanart,
profile_id_to_language(
movie["qualityProfileId"]
),
sceneName,
unicode(bool(movie["monitored"])),
)
)
else:
movies_to_add.append(
(
movie["title"],
movie["path"]
+ separator
+ movie["movieFile"]["relativePath"],
movie["tmdbId"],
movie["tmdbId"],
movie["tmdbId"],
movie["id"],
overview,
poster,
fanart,
profile_id_to_language(
movie["qualityProfileId"]
),
sceneName,
unicode(bool(movie["monitored"])),
)
)
# Update or insert movies in DB
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
updated_result = c.executemany(
"""UPDATE table_movies SET title = ?, path = ?, tmdbId = ?, radarrId = ?, overview = ?, poster = ?, fanart = ?, `audio_language` = ?, sceneName = ?, monitored = ? WHERE tmdbid = ?""",
movies_to_update,
)
db.commit()
if movie_default_enabled is True:
added_result = c.executemany(
"""INSERT OR IGNORE INTO table_movies(title, path, tmdbId, languages, subtitles,`hearing_impaired`, radarrId, overview, poster, fanart, `audio_language`, sceneName, monitored) VALUES (?,?,?,?,?, ?, ?, ?, ?, ?, ?, ?, ?)""",
movies_to_add,
)
db.commit()
else:
added_result = c.executemany(
"""INSERT OR IGNORE INTO table_movies(title, path, tmdbId, languages, subtitles,`hearing_impaired`, radarrId, overview, poster, fanart, `audio_language`, sceneName, monitored) VALUES (?,?,?,(SELECT languages FROM table_movies WHERE tmdbId = ?), '[]',(SELECT `hearing_impaired` FROM table_movies WHERE tmdbId = ?), ?, ?, ?, ?, ?, ?, ?)""",
movies_to_add,
)
db.commit()
db.close()
added_movies = list(
set(current_movies_radarr) - set(current_movies_db_list)
)
removed_movies = list(
set(current_movies_db_list) - set(current_movies_radarr)
)
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
c.executemany("DELETE FROM table_movies WHERE tmdbId = ?", removed_movies)
db.commit()
db.close()
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
for added_movie in added_movies:
added_path = c.execute(
"SELECT path FROM table_movies WHERE tmdbId = ?", (added_movie,)
).fetchone()
store_subtitles_movie(path_replace_movie(added_path[0]))
db.close()
logging.debug("All movies synced from Radarr into database.")
list_missing_subtitles_movies()
logging.debug("All movie missing subtitles updated in database.")
|
def update_movies():
logging.debug("Starting movie sync from Radarr.")
from get_settings import get_radarr_settings
url_radarr = get_radarr_settings()[6]
apikey_radarr = get_radarr_settings()[4]
movie_default_enabled = get_general_settings()[18]
movie_default_language = get_general_settings()[19]
movie_default_hi = get_general_settings()[20]
if apikey_radarr == None:
pass
else:
get_profile_list()
# Get movies data from radarr
url_radarr_api_movies = url_radarr + "/api/movie?apikey=" + apikey_radarr
try:
r = requests.get(url_radarr_api_movies, timeout=15, verify=False)
r.raise_for_status()
except requests.exceptions.HTTPError as errh:
logging.exception("Error trying to get movies from Radarr. Http error.")
except requests.exceptions.ConnectionError as errc:
logging.exception(
"Error trying to get movies from Radarr. Connection Error."
)
except requests.exceptions.Timeout as errt:
logging.exception("Error trying to get movies from Radarr. Timeout Error.")
except requests.exceptions.RequestException as err:
logging.exception("Error trying to get movies from Radarr.")
else:
# Get current movies in DB
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
current_movies_db = c.execute("SELECT tmdbId FROM table_movies").fetchall()
db.close()
current_movies_db_list = [x[0] for x in current_movies_db]
current_movies_radarr = []
movies_to_update = []
movies_to_add = []
for movie in r.json():
if movie["hasFile"] is True:
if "movieFile" in movie:
try:
overview = unicode(movie["overview"])
except:
overview = ""
try:
poster_big = movie["images"][0]["url"]
poster = (
os.path.splitext(poster_big)[0]
+ "-500"
+ os.path.splitext(poster_big)[1]
)
except:
poster = ""
try:
fanart = movie["images"][1]["url"]
except:
fanart = ""
if "sceneName" in movie["movieFile"]:
sceneName = movie["movieFile"]["sceneName"]
else:
sceneName = None
# Add movies in radarr to current movies list
current_movies_radarr.append(unicode(movie["tmdbId"]))
# Detect file separator
if movie["path"][0] == "/":
separator = "/"
else:
separator = "\\"
if unicode(movie["tmdbId"]) in current_movies_db_list:
movies_to_update.append(
(
movie["title"],
movie["path"]
+ separator
+ movie["movieFile"]["relativePath"],
movie["tmdbId"],
movie["id"],
overview,
poster,
fanart,
profile_id_to_language(movie["qualityProfileId"]),
sceneName,
unicode(bool(movie["monitored"])),
movie["tmdbId"],
)
)
else:
if movie_default_enabled is True:
movies_to_add.append(
(
movie["title"],
movie["path"]
+ separator
+ movie["movieFile"]["relativePath"],
movie["tmdbId"],
movie_default_language,
"[]",
movie_default_hi,
movie["id"],
overview,
poster,
fanart,
profile_id_to_language(
movie["qualityProfileId"]
),
sceneName,
unicode(bool(movie["monitored"])),
)
)
else:
movies_to_add.append(
(
movie["title"],
movie["path"]
+ separator
+ movie["movieFile"]["relativePath"],
movie["tmdbId"],
movie["tmdbId"],
movie["tmdbId"],
movie["id"],
overview,
poster,
fanart,
profile_id_to_language(
movie["qualityProfileId"]
),
sceneName,
unicode(bool(movie["monitored"])),
)
)
# Update or insert movies in DB
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
updated_result = c.executemany(
"""UPDATE table_movies SET title = ?, path = ?, tmdbId = ?, radarrId = ?, overview = ?, poster = ?, fanart = ?, `audio_language` = ?, sceneName = ?, monitored = ? WHERE tmdbid = ?""",
movies_to_update,
)
db.commit()
if movie_default_enabled is True:
added_result = c.executemany(
"""INSERT INTO table_movies(title, path, tmdbId, languages, subtitles,`hearing_impaired`, radarrId, overview, poster, fanart, `audio_language`, sceneName, monitored) VALUES (?,?,?,?,?, ?, ?, ?, ?, ?, ?, ?, ?)""",
movies_to_add,
)
db.commit()
else:
added_result = c.executemany(
"""INSERT INTO table_movies(title, path, tmdbId, languages, subtitles,`hearing_impaired`, radarrId, overview, poster, fanart, `audio_language`, sceneName, monitored) VALUES (?,?,?,(SELECT languages FROM table_movies WHERE tmdbId = ?), '[]',(SELECT `hearing_impaired` FROM table_movies WHERE tmdbId = ?), ?, ?, ?, ?, ?, ?, ?)""",
movies_to_add,
)
db.commit()
db.close()
added_movies = list(
set(current_movies_radarr) - set(current_movies_db_list)
)
removed_movies = list(
set(current_movies_db_list) - set(current_movies_radarr)
)
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
c.executemany("DELETE FROM table_movies WHERE tmdbId = ?", removed_movies)
db.commit()
db.close()
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
for added_movie in added_movies:
added_path = c.execute(
"SELECT path FROM table_movies WHERE tmdbId = ?", (added_movie,)
).fetchone()
store_subtitles_movie(path_replace_movie(added_path[0]))
db.close()
logging.debug("All movies synced from Radarr into database.")
list_missing_subtitles_movies()
logging.debug("All movie missing subtitles updated in database.")
|
https://github.com/morpheus65535/bazarr/issues/153
|
26/09/2018 04:04:37|ERROR|Job "Update movies list from Radarr (trigger: interval[0:05:00], next run at: 2018-09-26 04:09:33 PDT)" raised an exception|'Traceback (most recent call last):\n File "/app/libs/apscheduler/executors/base.py", line 125, in run_job\n retval = job.func(*job.args, **job.kwargs)\n File "/app/get_movies.py", line 100, in update_movies\n added_result = c.executemany(\'\'\'INSERT INTO table_movies(title, path, tmdbId, languages, subtitles,`hearing_impaired`, radarrId, overview, poster, fanart, `audio_language`, sceneName, monitored) VALUES (?,?,?,(SELECT languages FROM table_movies WHERE tmdbId = ?), \'[]\',(SELECT `hearing_impaired` FROM table_movies WHERE tmdbId = ?), ?, ?, ?, ?, ?, ?, ?)\'\'\', movies_to_add)\nIntegrityError: UNIQUE constraint failed: table_movies.path'|
|
nIntegrityError
|
def update_series():
from get_settings import get_sonarr_settings
url_sonarr = get_sonarr_settings()[6]
apikey_sonarr = get_sonarr_settings()[4]
serie_default_enabled = get_general_settings()[15]
serie_default_language = get_general_settings()[16]
serie_default_hi = get_general_settings()[17]
if apikey_sonarr == None:
pass
else:
get_profile_list()
# Get shows data from Sonarr
url_sonarr_api_series = url_sonarr + "/api/series?apikey=" + apikey_sonarr
try:
r = requests.get(url_sonarr_api_series, timeout=15, verify=False)
r.raise_for_status()
except requests.exceptions.HTTPError as errh:
logging.exception("Error trying to get series from Sonarr. Http error.")
except requests.exceptions.ConnectionError as errc:
logging.exception(
"Error trying to get series from Sonarr. Connection Error."
)
except requests.exceptions.Timeout as errt:
logging.exception("Error trying to get series from Sonarr. Timeout Error.")
except requests.exceptions.RequestException as err:
logging.exception("Error trying to get series from Sonarr.")
else:
# Open database connection
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
# Get current shows in DB
current_shows_db = c.execute("SELECT tvdbId FROM table_shows").fetchall()
# Close database connection
db.close()
current_shows_db_list = [x[0] for x in current_shows_db]
current_shows_sonarr = []
series_to_update = []
series_to_add = []
for show in r.json():
try:
overview = unicode(show["overview"])
except:
overview = ""
try:
poster_big = show["images"][2]["url"].split("?")[0]
poster = (
os.path.splitext(poster_big)[0]
+ "-250"
+ os.path.splitext(poster_big)[1]
)
except:
poster = ""
try:
fanart = show["images"][0]["url"].split("?")[0]
except:
fanart = ""
# Add shows in Sonarr to current shows list
current_shows_sonarr.append(show["tvdbId"])
if show["tvdbId"] in current_shows_db_list:
series_to_update.append(
(
show["title"],
show["path"],
show["tvdbId"],
show["id"],
overview,
poster,
fanart,
profile_id_to_language(
(
show["qualityProfileId"]
if sonarr_version == 2
else show["languageProfileId"]
)
),
show["sortTitle"],
show["tvdbId"],
)
)
else:
if serie_default_enabled is True:
series_to_add.append(
(
show["title"],
show["path"],
show["tvdbId"],
serie_default_language,
serie_default_hi,
show["id"],
overview,
poster,
fanart,
profile_id_to_language(show["qualityProfileId"]),
show["sortTitle"],
)
)
else:
series_to_add.append(
(
show["title"],
show["path"],
show["tvdbId"],
show["tvdbId"],
show["tvdbId"],
show["id"],
overview,
poster,
fanart,
profile_id_to_language(show["qualityProfileId"]),
show["sortTitle"],
)
)
# Update or insert series in DB
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
updated_result = c.executemany(
"""UPDATE table_shows SET title = ?, path = ?, tvdbId = ?, sonarrSeriesId = ?, overview = ?, poster = ?, fanart = ?, `audio_language` = ? , sortTitle = ? WHERE tvdbid = ?""",
series_to_update,
)
db.commit()
if serie_default_enabled is True:
added_result = c.executemany(
"""INSERT OR IGNORE INTO table_shows(title, path, tvdbId, languages,`hearing_impaired`, sonarrSeriesId, overview, poster, fanart, `audio_language`, sortTitle) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
series_to_add,
)
db.commit()
else:
added_result = c.executemany(
"""INSERT OR IGNORE INTO table_shows(title, path, tvdbId, languages,`hearing_impaired`, sonarrSeriesId, overview, poster, fanart, `audio_language`, sortTitle) VALUES (?,?,?,(SELECT languages FROM table_shows WHERE tvdbId = ?),(SELECT `hearing_impaired` FROM table_shows WHERE tvdbId = ?), ?, ?, ?, ?, ?, ?)""",
series_to_add,
)
db.commit()
db.close()
for show in series_to_add:
list_missing_subtitles(show[5])
# Delete shows not in Sonarr anymore
deleted_items = []
for item in current_shows_db_list:
if item not in current_shows_sonarr:
deleted_items.append(tuple([item]))
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
c.executemany("DELETE FROM table_shows WHERE tvdbId = ?", deleted_items)
db.commit()
db.close()
|
def update_series():
from get_settings import get_sonarr_settings
url_sonarr = get_sonarr_settings()[6]
apikey_sonarr = get_sonarr_settings()[4]
serie_default_enabled = get_general_settings()[15]
serie_default_language = get_general_settings()[16]
serie_default_hi = get_general_settings()[17]
if apikey_sonarr == None:
pass
else:
get_profile_list()
# Get shows data from Sonarr
url_sonarr_api_series = url_sonarr + "/api/series?apikey=" + apikey_sonarr
try:
r = requests.get(url_sonarr_api_series, timeout=15, verify=False)
r.raise_for_status()
except requests.exceptions.HTTPError as errh:
logging.exception("Error trying to get series from Sonarr. Http error.")
except requests.exceptions.ConnectionError as errc:
logging.exception(
"Error trying to get series from Sonarr. Connection Error."
)
except requests.exceptions.Timeout as errt:
logging.exception("Error trying to get series from Sonarr. Timeout Error.")
except requests.exceptions.RequestException as err:
logging.exception("Error trying to get series from Sonarr.")
else:
# Open database connection
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
# Get current shows in DB
current_shows_db = c.execute("SELECT tvdbId FROM table_shows").fetchall()
# Close database connection
db.close()
current_shows_db_list = [x[0] for x in current_shows_db]
current_shows_sonarr = []
series_to_update = []
series_to_add = []
for show in r.json():
try:
overview = unicode(show["overview"])
except:
overview = ""
try:
poster_big = show["images"][2]["url"].split("?")[0]
poster = (
os.path.splitext(poster_big)[0]
+ "-250"
+ os.path.splitext(poster_big)[1]
)
except:
poster = ""
try:
fanart = show["images"][0]["url"].split("?")[0]
except:
fanart = ""
# Add shows in Sonarr to current shows list
current_shows_sonarr.append(show["tvdbId"])
if show["tvdbId"] in current_shows_db_list:
series_to_update.append(
(
show["title"],
show["path"],
show["tvdbId"],
show["id"],
overview,
poster,
fanart,
profile_id_to_language(
(
show["qualityProfileId"]
if sonarr_version == 2
else show["languageProfileId"]
)
),
show["sortTitle"],
show["tvdbId"],
)
)
else:
if serie_default_enabled is True:
series_to_add.append(
(
show["title"],
show["path"],
show["tvdbId"],
serie_default_language,
serie_default_hi,
show["id"],
overview,
poster,
fanart,
profile_id_to_language(show["qualityProfileId"]),
show["sortTitle"],
)
)
else:
series_to_add.append(
(
show["title"],
show["path"],
show["tvdbId"],
show["tvdbId"],
show["tvdbId"],
show["id"],
overview,
poster,
fanart,
profile_id_to_language(show["qualityProfileId"]),
show["sortTitle"],
)
)
# Update or insert series in DB
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
updated_result = c.executemany(
"""UPDATE table_shows SET title = ?, path = ?, tvdbId = ?, sonarrSeriesId = ?, overview = ?, poster = ?, fanart = ?, `audio_language` = ? , sortTitle = ? WHERE tvdbid = ?""",
series_to_update,
)
db.commit()
if serie_default_enabled is True:
added_result = c.executemany(
"""INSERT INTO table_shows(title, path, tvdbId, languages,`hearing_impaired`, sonarrSeriesId, overview, poster, fanart, `audio_language`, sortTitle) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
series_to_add,
)
db.commit()
else:
added_result = c.executemany(
"""INSERT INTO table_shows(title, path, tvdbId, languages,`hearing_impaired`, sonarrSeriesId, overview, poster, fanart, `audio_language`, sortTitle) VALUES (?,?,?,(SELECT languages FROM table_shows WHERE tvdbId = ?),(SELECT `hearing_impaired` FROM table_shows WHERE tvdbId = ?), ?, ?, ?, ?, ?, ?)""",
series_to_add,
)
db.commit()
db.close()
for show in series_to_add:
list_missing_subtitles(show[5])
# Delete shows not in Sonarr anymore
deleted_items = []
for item in current_shows_db_list:
if item not in current_shows_sonarr:
deleted_items.append(tuple([item]))
db = sqlite3.connect(os.path.join(config_dir, "db/bazarr.db"), timeout=30)
c = db.cursor()
c.executemany("DELETE FROM table_shows WHERE tvdbId = ?", deleted_items)
db.commit()
db.close()
|
https://github.com/morpheus65535/bazarr/issues/153
|
26/09/2018 04:04:37|ERROR|Job "Update movies list from Radarr (trigger: interval[0:05:00], next run at: 2018-09-26 04:09:33 PDT)" raised an exception|'Traceback (most recent call last):\n File "/app/libs/apscheduler/executors/base.py", line 125, in run_job\n retval = job.func(*job.args, **job.kwargs)\n File "/app/get_movies.py", line 100, in update_movies\n added_result = c.executemany(\'\'\'INSERT INTO table_movies(title, path, tmdbId, languages, subtitles,`hearing_impaired`, radarrId, overview, poster, fanart, `audio_language`, sceneName, monitored) VALUES (?,?,?,(SELECT languages FROM table_movies WHERE tmdbId = ?), \'[]\',(SELECT `hearing_impaired` FROM table_movies WHERE tmdbId = ?), ?, ?, ?, ?, ?, ?, ?)\'\'\', movies_to_add)\nIntegrityError: UNIQUE constraint failed: table_movies.path'|
|
nIntegrityError
|
def save_subtitles(video, subtitles, single=False, directory=None, encoding=None):
"""Save subtitles on filesystem.
Subtitles are saved in the order of the list. If a subtitle with a language has already been saved, other subtitles
with the same language are silently ignored.
The extension used is `.lang.srt` by default or `.srt` is `single` is `True`, with `lang` being the IETF code for
the :attr:`~subliminal.subtitle.Subtitle.language` of the subtitle.
:param video: video of the subtitles.
:type video: :class:`~subliminal.video.Video`
:param subtitles: subtitles to save.
:type subtitles: list of :class:`~subliminal.subtitle.Subtitle`
:param bool single: save a single subtitle, default is to save one subtitle per language.
:param str directory: path to directory where to save the subtitles, default is next to the video.
:param str encoding: encoding in which to save the subtitles, default is to keep original encoding.
:return: the saved subtitles
:rtype: list of :class:`~subliminal.subtitle.Subtitle`
"""
saved_subtitles = []
for subtitle in subtitles:
# check content
if subtitle.content is None:
logger.error("Skipping subtitle %r: no content", subtitle)
continue
# check language
if subtitle.language in set(s.language for s in saved_subtitles):
logger.debug("Skipping subtitle %r: language already saved", subtitle)
continue
# create subtitle path
subtitle_path = None
subtitle_path = get_subtitle_path(
video.name, None if single else subtitle.language
)
if directory is not None:
subtitle_path = os.path.join(directory, os.path.split(subtitle_path)[1])
# save content as is or in the specified encoding
logger.info("Saving %r to %r", subtitle, subtitle_path)
if encoding is None:
with io.open(subtitle_path, "wb") as f:
f.write(subtitle.content)
else:
with io.open(subtitle_path, "w", encoding=encoding) as f:
f.write(subtitle.text)
saved_subtitles.append(subtitle)
# check single
if single:
break
return [saved_subtitles, subtitle_path]
|
def save_subtitles(video, subtitles, single=False, directory=None, encoding=None):
"""Save subtitles on filesystem.
Subtitles are saved in the order of the list. If a subtitle with a language has already been saved, other subtitles
with the same language are silently ignored.
The extension used is `.lang.srt` by default or `.srt` is `single` is `True`, with `lang` being the IETF code for
the :attr:`~subliminal.subtitle.Subtitle.language` of the subtitle.
:param video: video of the subtitles.
:type video: :class:`~subliminal.video.Video`
:param subtitles: subtitles to save.
:type subtitles: list of :class:`~subliminal.subtitle.Subtitle`
:param bool single: save a single subtitle, default is to save one subtitle per language.
:param str directory: path to directory where to save the subtitles, default is next to the video.
:param str encoding: encoding in which to save the subtitles, default is to keep original encoding.
:return: the saved subtitles
:rtype: list of :class:`~subliminal.subtitle.Subtitle`
"""
saved_subtitles = []
for subtitle in subtitles:
# check content
if subtitle.content is None:
logger.error("Skipping subtitle %r: no content", subtitle)
continue
# check language
if subtitle.language in set(s.language for s in saved_subtitles):
logger.debug("Skipping subtitle %r: language already saved", subtitle)
continue
# create subtitle path
subtitle_path = get_subtitle_path(
video.name, None if single else subtitle.language
)
if directory is not None:
subtitle_path = os.path.join(directory, os.path.split(subtitle_path)[1])
# save content as is or in the specified encoding
logger.info("Saving %r to %r", subtitle, subtitle_path)
if encoding is None:
with io.open(subtitle_path, "wb") as f:
f.write(subtitle.content)
else:
with io.open(subtitle_path, "w", encoding=encoding) as f:
f.write(subtitle.text)
saved_subtitles.append(subtitle)
# check single
if single:
break
return [saved_subtitles, subtitle_path]
|
https://github.com/morpheus65535/bazarr/issues/158
|
Traceback (most recent call last):
File "c:\\bazarr\\get_subtitle.py", line 215, in manual_download_subtitle
result = save_subtitles(video, [best_subtitle], single=True, encoding=\utf-8\)
File "c:\\bazarr\\libs/subliminal\\core.py", line 771, in save_subtitles
return [saved_subtitles, subtitle_path]
UnboundLocalError: local variable \subtitle_path\ referenced before assignment
|
UnboundLocalError
|
def download_subtitle(
path, language, hi, providers, providers_auth, sceneName, media_type
):
if hi == "True":
hi = True
else:
hi = False
if media_type == "series":
type_of_score = 360
minimum_score = float(get_general_settings()[8]) / 100 * type_of_score
elif media_type == "movie":
type_of_score = 120
minimum_score = float(get_general_settings()[22]) / 100 * type_of_score
use_scenename = get_general_settings()[9]
use_postprocessing = get_general_settings()[10]
postprocessing_cmd = get_general_settings()[11]
if language == "pob":
lang_obj = Language("por", "BR")
else:
lang_obj = Language(language)
try:
if sceneName is None or use_scenename is False:
used_sceneName = False
video = scan_video(path)
else:
used_sceneName = True
video = Video.fromname(sceneName)
except Exception as e:
logging.exception(
"Error trying to extract information from this filename: " + path
)
return None
else:
try:
best_subtitles = download_best_subtitles(
[video],
{lang_obj},
providers=providers,
min_score=minimum_score,
hearing_impaired=hi,
provider_configs=providers_auth,
)
except Exception as e:
logging.exception(
"Error trying to get the best subtitles for this file: " + path
)
return None
else:
try:
best_subtitle = best_subtitles[video][0]
except:
logging.debug("No subtitles found for " + path)
return None
else:
single = get_general_settings()[7]
try:
score = round(
float(compute_score(best_subtitle, video, hearing_impaired=hi))
/ type_of_score
* 100,
2,
)
if used_sceneName == True:
video = scan_video(path)
if single is True:
result = save_subtitles(
video, [best_subtitle], single=True, encoding="utf-8"
)
else:
result = save_subtitles(
video, [best_subtitle], encoding="utf-8"
)
except:
logging.error("Error saving subtitles file to disk.")
return None
else:
downloaded_provider = str(result[0]).strip("<>").split(" ")[0][:-8]
downloaded_language = language_from_alpha3(language)
downloaded_language_code2 = alpha2_from_alpha3(language)
downloaded_language_code3 = language
downloaded_path = get_subtitle_path(path, language=lang_obj)
if used_sceneName == True:
message = (
downloaded_language
+ " subtitles downloaded from "
+ downloaded_provider
+ " with a score of "
+ unicode(score)
+ "% using this scene name: "
+ sceneName
)
else:
message = (
downloaded_language
+ " subtitles downloaded from "
+ downloaded_provider
+ " with a score of "
+ unicode(score)
+ "% using filename guessing."
)
if use_postprocessing is True:
command = pp_replace(
postprocessing_cmd,
path,
downloaded_path,
downloaded_language,
downloaded_language_code2,
downloaded_language_code3,
)
try:
if os.name == "nt":
codepage = subprocess.Popen(
"chcp",
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
# wait for the process to terminate
out_codepage, err_codepage = codepage.communicate()
encoding = out_codepage.split(":")[-1].strip()
process = subprocess.Popen(
command,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
# wait for the process to terminate
out, err = process.communicate()
if os.name == "nt":
out = out.decode(encoding)
except:
if out == "":
logging.error(
"Post-processing result for file "
+ path
+ " : Nothing returned from command execution"
)
else:
logging.error(
"Post-processing result for file "
+ path
+ " : "
+ out
)
else:
if out == "":
logging.info(
"Post-processing result for file "
+ path
+ " : Nothing returned from command execution"
)
else:
logging.info(
"Post-processing result for file "
+ path
+ " : "
+ out
)
return message
|
def download_subtitle(
path, language, hi, providers, providers_auth, sceneName, media_type
):
if hi == "True":
hi = True
else:
hi = False
if media_type == "series":
type_of_score = 360
minimum_score = float(get_general_settings()[8]) / 100 * type_of_score
elif media_type == "movie":
type_of_score = 120
minimum_score = float(get_general_settings()[22]) / 100 * type_of_score
use_scenename = get_general_settings()[9]
use_postprocessing = get_general_settings()[10]
postprocessing_cmd = get_general_settings()[11]
if language == "pob":
lang_obj = Language("por", "BR")
else:
lang_obj = Language(language)
try:
if sceneName is None or use_scenename is False:
used_sceneName = False
video = scan_video(path)
else:
used_sceneName = True
video = Video.fromname(sceneName)
except Exception as e:
logging.exception(
"Error trying to extract information from this filename: " + path
)
return None
else:
try:
best_subtitles = download_best_subtitles(
[video],
{lang_obj},
providers=providers,
min_score=minimum_score,
hearing_impaired=hi,
provider_configs=providers_auth,
)
except Exception as e:
logging.exception(
"Error trying to get the best subtitles for this file: " + path
)
return None
else:
try:
best_subtitle = best_subtitles[video][0]
except:
logging.debug("No subtitles found for " + path)
return None
else:
single = get_general_settings()[7]
try:
score = round(
float(compute_score(best_subtitle, video, hearing_impaired=hi))
/ type_of_score
* 100,
2,
)
if used_sceneName == True:
video = scan_video(path)
if single is True:
result = save_subtitles(
video, [best_subtitle], single=True, encoding="utf-8"
)
else:
result = save_subtitles(
video, [best_subtitle], encoding="utf-8"
)
except:
logging.error("Error saving subtitles file to disk.")
return None
else:
downloaded_provider = (
str(result[0][0]).strip("<>").split(" ")[0][:-8]
)
downloaded_language = language_from_alpha3(language)
downloaded_language_code2 = alpha2_from_alpha3(language)
downloaded_language_code3 = language
downloaded_path = result[1]
if used_sceneName == True:
message = (
downloaded_language
+ " subtitles downloaded from "
+ downloaded_provider
+ " with a score of "
+ unicode(score)
+ "% using this scene name: "
+ sceneName
)
else:
message = (
downloaded_language
+ " subtitles downloaded from "
+ downloaded_provider
+ " with a score of "
+ unicode(score)
+ "% using filename guessing."
)
if use_postprocessing is True:
command = pp_replace(
postprocessing_cmd,
path,
downloaded_path,
downloaded_language,
downloaded_language_code2,
downloaded_language_code3,
)
try:
if os.name == "nt":
codepage = subprocess.Popen(
"chcp",
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
# wait for the process to terminate
out_codepage, err_codepage = codepage.communicate()
encoding = out_codepage.split(":")[-1].strip()
process = subprocess.Popen(
command,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
# wait for the process to terminate
out, err = process.communicate()
if os.name == "nt":
out = out.decode(encoding)
except:
if out == "":
logging.error(
"Post-processing result for file "
+ path
+ " : Nothing returned from command execution"
)
else:
logging.error(
"Post-processing result for file "
+ path
+ " : "
+ out
)
else:
if out == "":
logging.info(
"Post-processing result for file "
+ path
+ " : Nothing returned from command execution"
)
else:
logging.info(
"Post-processing result for file "
+ path
+ " : "
+ out
)
return message
|
https://github.com/morpheus65535/bazarr/issues/158
|
Traceback (most recent call last):
File "c:\\bazarr\\get_subtitle.py", line 215, in manual_download_subtitle
result = save_subtitles(video, [best_subtitle], single=True, encoding=\utf-8\)
File "c:\\bazarr\\libs/subliminal\\core.py", line 771, in save_subtitles
return [saved_subtitles, subtitle_path]
UnboundLocalError: local variable \subtitle_path\ referenced before assignment
|
UnboundLocalError
|
def manual_download_subtitle(
path, language, hi, subtitle, provider, providers_auth, sceneName, media_type
):
if hi == "True":
hi = True
else:
hi = False
subtitle = pickle.loads(codecs.decode(subtitle.encode(), "base64"))
if media_type == "series":
type_of_score = 360
elif media_type == "movie":
type_of_score = 120
use_scenename = get_general_settings()[9]
use_postprocessing = get_general_settings()[10]
postprocessing_cmd = get_general_settings()[11]
if language == "pb":
language = alpha3_from_alpha2(language)
lang_obj = Language("por", "BR")
else:
language = alpha3_from_alpha2(language)
lang_obj = Language(language)
try:
if sceneName is None or use_scenename is False:
used_sceneName = False
video = scan_video(path)
else:
used_sceneName = True
video = Video.fromname(sceneName)
except Exception as e:
logging.exception(
"Error trying to extract information from this filename: " + path
)
return None
else:
try:
best_subtitle = subtitle
download_subtitles(
[best_subtitle], providers=provider, provider_configs=providers_auth
)
except Exception as e:
logging.exception("Error downloading subtitles for " + path)
return None
else:
single = get_general_settings()[7]
try:
score = round(
float(compute_score(best_subtitle, video, hearing_impaired=hi))
/ type_of_score
* 100,
2,
)
if used_sceneName == True:
video = scan_video(path)
if single is True:
result = save_subtitles(
video, [best_subtitle], single=True, encoding="utf-8"
)
else:
result = save_subtitles(video, [best_subtitle], encoding="utf-8")
except Exception as e:
logging.exception("Error saving subtitles file to disk.")
return None
else:
downloaded_provider = str(result[0]).strip("<>").split(" ")[0][:-8]
downloaded_language = language_from_alpha3(language)
downloaded_language_code2 = alpha2_from_alpha3(language)
downloaded_language_code3 = language
downloaded_path = get_subtitle_path(path, language=lang_obj)
message = (
downloaded_language
+ " subtitles downloaded from "
+ downloaded_provider
+ " with a score of "
+ unicode(score)
+ "% using manual search."
)
if use_postprocessing is True:
command = pp_replace(
postprocessing_cmd,
path,
downloaded_path,
downloaded_language,
downloaded_language_code2,
downloaded_language_code3,
)
try:
if os.name == "nt":
codepage = subprocess.Popen(
"chcp",
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
# wait for the process to terminate
out_codepage, err_codepage = codepage.communicate()
encoding = out_codepage.split(":")[-1].strip()
process = subprocess.Popen(
command,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
# wait for the process to terminate
out, err = process.communicate()
if os.name == "nt":
out = out.decode(encoding)
except:
if out == "":
logging.error(
"Post-processing result for file "
+ path
+ " : Nothing returned from command execution"
)
else:
logging.error(
"Post-processing result for file " + path + " : " + out
)
else:
if out == "":
logging.info(
"Post-processing result for file "
+ path
+ " : Nothing returned from command execution"
)
else:
logging.info(
"Post-processing result for file " + path + " : " + out
)
return message
|
def manual_download_subtitle(
path, language, hi, subtitle, provider, providers_auth, sceneName, media_type
):
if hi == "True":
hi = True
else:
hi = False
subtitle = pickle.loads(codecs.decode(subtitle.encode(), "base64"))
if media_type == "series":
type_of_score = 360
elif media_type == "movie":
type_of_score = 120
use_scenename = get_general_settings()[9]
use_postprocessing = get_general_settings()[10]
postprocessing_cmd = get_general_settings()[11]
if language == "pb":
language = alpha3_from_alpha2(language)
lang_obj = Language("por", "BR")
else:
language = alpha3_from_alpha2(language)
lang_obj = Language(language)
try:
if sceneName is None or use_scenename is False:
used_sceneName = False
video = scan_video(path)
else:
used_sceneName = True
video = Video.fromname(sceneName)
except Exception as e:
logging.exception(
"Error trying to extract information from this filename: " + path
)
return None
else:
try:
best_subtitle = subtitle
download_subtitles(
[best_subtitle], providers=provider, provider_configs=providers_auth
)
except Exception as e:
logging.exception("Error downloading subtitles for " + path)
return None
else:
single = get_general_settings()[7]
try:
score = round(
float(compute_score(best_subtitle, video, hearing_impaired=hi))
/ type_of_score
* 100,
2,
)
if used_sceneName == True:
video = scan_video(path)
if single is True:
result = save_subtitles(
video, [best_subtitle], single=True, encoding="utf-8"
)
else:
result = save_subtitles(video, [best_subtitle], encoding="utf-8")
except Exception as e:
logging.exception("Error saving subtitles file to disk.")
return None
else:
downloaded_provider = str(result[0][0]).strip("<>").split(" ")[0][:-8]
downloaded_language = language_from_alpha3(language)
downloaded_language_code2 = alpha2_from_alpha3(language)
downloaded_language_code3 = language
downloaded_path = result[1]
message = (
downloaded_language
+ " subtitles downloaded from "
+ downloaded_provider
+ " with a score of "
+ unicode(score)
+ "% using manual search."
)
if use_postprocessing is True:
command = pp_replace(
postprocessing_cmd,
path,
downloaded_path,
downloaded_language,
downloaded_language_code2,
downloaded_language_code3,
)
try:
if os.name == "nt":
codepage = subprocess.Popen(
"chcp",
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
# wait for the process to terminate
out_codepage, err_codepage = codepage.communicate()
encoding = out_codepage.split(":")[-1].strip()
process = subprocess.Popen(
command,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
# wait for the process to terminate
out, err = process.communicate()
if os.name == "nt":
out = out.decode(encoding)
except:
if out == "":
logging.error(
"Post-processing result for file "
+ path
+ " : Nothing returned from command execution"
)
else:
logging.error(
"Post-processing result for file " + path + " : " + out
)
else:
if out == "":
logging.info(
"Post-processing result for file "
+ path
+ " : Nothing returned from command execution"
)
else:
logging.info(
"Post-processing result for file " + path + " : " + out
)
return message
|
https://github.com/morpheus65535/bazarr/issues/158
|
Traceback (most recent call last):
File "c:\\bazarr\\get_subtitle.py", line 215, in manual_download_subtitle
result = save_subtitles(video, [best_subtitle], single=True, encoding=\utf-8\)
File "c:\\bazarr\\libs/subliminal\\core.py", line 771, in save_subtitles
return [saved_subtitles, subtitle_path]
UnboundLocalError: local variable \subtitle_path\ referenced before assignment
|
UnboundLocalError
|
def download(
obj,
provider,
refiner,
language,
age,
directory,
encoding,
single,
force,
hearing_impaired,
min_score,
max_workers,
archives,
verbose,
path,
):
"""Download best subtitles.
PATH can be an directory containing videos, a video file path or a video file name. It can be used multiple times.
If an existing subtitle is detected (external or embedded) in the correct language, the download is skipped for
the associated video.
"""
# process parameters
language = set(language)
# scan videos
videos = []
ignored_videos = []
errored_paths = []
with click.progressbar(
path, label="Collecting videos", item_show_func=lambda p: p or ""
) as bar:
for p in bar:
logger.debug("Collecting path %s", p)
# non-existing
if not os.path.exists(p):
try:
video = Video.fromname(p)
except:
logger.exception(
"Unexpected error while collecting non-existing path %s", p
)
errored_paths.append(p)
continue
if not force:
video.subtitle_languages |= set(
search_external_subtitles(
video.name, directory=directory
).values()
)
refine(
video,
episode_refiners=refiner,
movie_refiners=refiner,
embedded_subtitles=not force,
)
videos.append(video)
continue
# directories
if os.path.isdir(p):
try:
scanned_videos = scan_videos(p, age=age, archives=archives)
except:
logger.exception(
"Unexpected error while collecting directory path %s", p
)
errored_paths.append(p)
continue
for video in scanned_videos:
if not force:
video.subtitle_languages |= set(
search_external_subtitles(
video.name, directory=directory
).values()
)
if check_video(
video, languages=language, age=age, undefined=single
):
refine(
video,
episode_refiners=refiner,
movie_refiners=refiner,
embedded_subtitles=not force,
)
videos.append(video)
else:
ignored_videos.append(video)
continue
# other inputs
try:
video = scan_video(p)
except:
logger.exception("Unexpected error while collecting path %s", p)
errored_paths.append(p)
continue
if not force:
video.subtitle_languages |= set(
search_external_subtitles(video.name, directory=directory).values()
)
if check_video(video, languages=language, age=age, undefined=single):
refine(
video,
episode_refiners=refiner,
movie_refiners=refiner,
embedded_subtitles=not force,
)
videos.append(video)
else:
ignored_videos.append(video)
# output errored paths
if verbose > 0:
for p in errored_paths:
click.secho("%s errored" % p, fg="red")
# output ignored videos
if verbose > 1:
for video in ignored_videos:
click.secho(
"%s ignored - subtitles: %s / age: %d day%s"
% (
os.path.split(video.name)[1],
", ".join(str(s) for s in video.subtitle_languages) or "none",
video.age.days,
"s" if video.age.days > 1 else "",
),
fg="yellow",
)
# report collected videos
click.echo(
"%s video%s collected / %s video%s ignored / %s error%s"
% (
click.style(str(len(videos)), bold=True, fg="green" if videos else None),
"s" if len(videos) > 1 else "",
click.style(
str(len(ignored_videos)),
bold=True,
fg="yellow" if ignored_videos else None,
),
"s" if len(ignored_videos) > 1 else "",
click.style(
str(len(errored_paths)), bold=True, fg="red" if errored_paths else None
),
"s" if len(errored_paths) > 1 else "",
)
)
# exit if no video collected
if not videos:
return
# download best subtitles
downloaded_subtitles = defaultdict(list)
with AsyncProviderPool(
max_workers=max_workers,
providers=provider,
provider_configs=obj["provider_configs"],
) as p:
with click.progressbar(
videos,
label="Downloading subtitles",
item_show_func=lambda v: os.path.split(v.name)[1] if v is not None else "",
) as bar:
for v in bar:
scores = get_scores(v)
subtitles = p.download_best_subtitles(
p.list_subtitles(v, language - v.subtitle_languages),
v,
language,
min_score=scores["hash"] * min_score / 100,
hearing_impaired=hearing_impaired,
only_one=single,
)
downloaded_subtitles[v] = subtitles
if p.discarded_providers:
click.secho(
"Some providers have been discarded due to unexpected errors: %s"
% ", ".join(p.discarded_providers),
fg="yellow",
)
# save subtitles
total_subtitles = 0
for v, subtitles in downloaded_subtitles.items():
saved_subtitles = save_subtitles(
v, subtitles, single=single, directory=directory, encoding=encoding
)
total_subtitles += len(saved_subtitles)
if verbose > 0:
click.echo(
"%s subtitle%s downloaded for %s"
% (
click.style(str(len(saved_subtitles)), bold=True),
"s" if len(saved_subtitles) > 1 else "",
os.path.split(v.name)[1],
)
)
if verbose > 1:
for s in saved_subtitles:
matches = s.get_matches(v)
score = compute_score(s, v)
# score color
score_color = None
scores = get_scores(v)
if isinstance(v, Movie):
if score < scores["title"]:
score_color = "red"
elif (
score
< scores["title"] + scores["year"] + scores["release_group"]
):
score_color = "yellow"
else:
score_color = "green"
elif isinstance(v, Episode):
if score < scores["series"] + scores["season"] + scores["episode"]:
score_color = "red"
elif (
score
< scores["series"]
+ scores["season"]
+ scores["episode"]
+ scores["release_group"]
):
score_color = "yellow"
else:
score_color = "green"
# scale score from 0 to 100 taking out preferences
scaled_score = score
if s.hearing_impaired == hearing_impaired:
scaled_score -= scores["hearing_impaired"]
scaled_score *= 100 / scores["hash"]
# echo some nice colored output
click.echo(
" - [{score}] {language} subtitle from {provider_name} (match on {matches})".format(
score=click.style(
"{:5.1f}".format(scaled_score),
fg=score_color,
bold=score >= scores["hash"],
),
language=s.language.name
if s.language.country is None
else "%s (%s)" % (s.language.name, s.language.country.name),
provider_name=s.provider_name,
matches=", ".join(
sorted(matches, key=scores.get, reverse=True)
),
)
)
if verbose == 0:
click.echo(
"Downloaded %s subtitle%s"
% (
click.style(str(total_subtitles), bold=True),
"s" if total_subtitles > 1 else "",
)
)
|
def download(
obj,
provider,
refiner,
language,
age,
directory,
encoding,
single,
force,
hearing_impaired,
min_score,
max_workers,
archives,
verbose,
path,
):
"""Download best subtitles.
PATH can be an directory containing videos, a video file path or a video file name. It can be used multiple times.
If an existing subtitle is detected (external or embedded) in the correct language, the download is skipped for
the associated video.
"""
# process parameters
language = set(language)
# scan videos
videos = []
ignored_videos = []
errored_paths = []
with click.progressbar(
path, label="Collecting videos", item_show_func=lambda p: p or ""
) as bar:
for p in bar:
logger.debug("Collecting path %s", p)
# non-existing
if not os.path.exists(p):
try:
video = Video.fromname(p)
except:
logger.exception(
"Unexpected error while collecting non-existing path %s", p
)
errored_paths.append(p)
continue
if not force:
video.subtitle_languages |= set(
search_external_subtitles(
video.name, directory=directory
).values()
)
refine(
video,
episode_refiners=refiner,
movie_refiners=refiner,
embedded_subtitles=not force,
)
videos.append(video)
continue
# directories
if os.path.isdir(p):
try:
scanned_videos = scan_videos(p, age=age, archives=archives)
except:
logger.exception(
"Unexpected error while collecting directory path %s", p
)
errored_paths.append(p)
continue
for video in scanned_videos:
if not force:
video.subtitle_languages |= set(
search_external_subtitles(
video.name, directory=directory
).values()
)
if check_video(
video, languages=language, age=age, undefined=single
):
refine(
video,
episode_refiners=refiner,
movie_refiners=refiner,
embedded_subtitles=not force,
)
videos.append(video)
else:
ignored_videos.append(video)
continue
# other inputs
try:
video = scan_video(p)
except:
logger.exception("Unexpected error while collecting path %s", p)
errored_paths.append(p)
continue
if not force:
video.subtitle_languages |= set(
search_external_subtitles(video.name, directory=directory).values()
)
if check_video(video, languages=language, age=age, undefined=single):
refine(
video,
episode_refiners=refiner,
movie_refiners=refiner,
embedded_subtitles=not force,
)
videos.append(video)
else:
ignored_videos.append(video)
# output errored paths
if verbose > 0:
for p in errored_paths:
click.secho("%s errored" % p, fg="red")
# output ignored videos
if verbose > 1:
for video in ignored_videos:
click.secho(
"%s ignored - subtitles: %s / age: %d day%s"
% (
os.path.split(video.name)[1],
", ".join(str(s) for s in video.subtitle_languages) or "none",
video.age.days,
"s" if video.age.days > 1 else "",
),
fg="yellow",
)
# report collected videos
click.echo(
"%s video%s collected / %s video%s ignored / %s error%s"
% (
click.style(str(len(videos)), bold=True, fg="green" if videos else None),
"s" if len(videos) > 1 else "",
click.style(
str(len(ignored_videos)),
bold=True,
fg="yellow" if ignored_videos else None,
),
"s" if len(ignored_videos) > 1 else "",
click.style(
str(len(errored_paths)), bold=True, fg="red" if errored_paths else None
),
"s" if len(errored_paths) > 1 else "",
)
)
# exit if no video collected
if not videos:
return
# download best subtitles
downloaded_subtitles = defaultdict(list)
with AsyncProviderPool(
max_workers=max_workers,
providers=provider,
provider_configs=obj["provider_configs"],
) as p:
with click.progressbar(
videos,
label="Downloading subtitles",
item_show_func=lambda v: os.path.split(v.name)[1] if v is not None else "",
) as bar:
for v in bar:
scores = get_scores(v)
subtitles = p.download_best_subtitles(
p.list_subtitles(v, language - v.subtitle_languages),
v,
language,
min_score=scores["hash"] * min_score / 100,
hearing_impaired=hearing_impaired,
only_one=single,
)
downloaded_subtitles[v] = subtitles
if p.discarded_providers:
click.secho(
"Some providers have been discarded due to unexpected errors: %s"
% ", ".join(p.discarded_providers),
fg="yellow",
)
# save subtitles
total_subtitles = 0
for v, subtitles in downloaded_subtitles.items():
saved_subtitles = save_subtitles(
v, subtitles, single=single, directory=directory, encoding=encoding
)[0]
total_subtitles += len(saved_subtitles)
if verbose > 0:
click.echo(
"%s subtitle%s downloaded for %s"
% (
click.style(str(len(saved_subtitles)), bold=True),
"s" if len(saved_subtitles) > 1 else "",
os.path.split(v.name)[1],
)
)
if verbose > 1:
for s in saved_subtitles:
matches = s.get_matches(v)
score = compute_score(s, v)
# score color
score_color = None
scores = get_scores(v)
if isinstance(v, Movie):
if score < scores["title"]:
score_color = "red"
elif (
score
< scores["title"] + scores["year"] + scores["release_group"]
):
score_color = "yellow"
else:
score_color = "green"
elif isinstance(v, Episode):
if score < scores["series"] + scores["season"] + scores["episode"]:
score_color = "red"
elif (
score
< scores["series"]
+ scores["season"]
+ scores["episode"]
+ scores["release_group"]
):
score_color = "yellow"
else:
score_color = "green"
# scale score from 0 to 100 taking out preferences
scaled_score = score
if s.hearing_impaired == hearing_impaired:
scaled_score -= scores["hearing_impaired"]
scaled_score *= 100 / scores["hash"]
# echo some nice colored output
click.echo(
" - [{score}] {language} subtitle from {provider_name} (match on {matches})".format(
score=click.style(
"{:5.1f}".format(scaled_score),
fg=score_color,
bold=score >= scores["hash"],
),
language=s.language.name
if s.language.country is None
else "%s (%s)" % (s.language.name, s.language.country.name),
provider_name=s.provider_name,
matches=", ".join(
sorted(matches, key=scores.get, reverse=True)
),
)
)
if verbose == 0:
click.echo(
"Downloaded %s subtitle%s"
% (
click.style(str(total_subtitles), bold=True),
"s" if total_subtitles > 1 else "",
)
)
|
https://github.com/morpheus65535/bazarr/issues/158
|
Traceback (most recent call last):
File "c:\\bazarr\\get_subtitle.py", line 215, in manual_download_subtitle
result = save_subtitles(video, [best_subtitle], single=True, encoding=\utf-8\)
File "c:\\bazarr\\libs/subliminal\\core.py", line 771, in save_subtitles
return [saved_subtitles, subtitle_path]
UnboundLocalError: local variable \subtitle_path\ referenced before assignment
|
UnboundLocalError
|
def save_subtitles(video, subtitles, single=False, directory=None, encoding=None):
"""Save subtitles on filesystem.
Subtitles are saved in the order of the list. If a subtitle with a language has already been saved, other subtitles
with the same language are silently ignored.
The extension used is `.lang.srt` by default or `.srt` is `single` is `True`, with `lang` being the IETF code for
the :attr:`~subliminal.subtitle.Subtitle.language` of the subtitle.
:param video: video of the subtitles.
:type video: :class:`~subliminal.video.Video`
:param subtitles: subtitles to save.
:type subtitles: list of :class:`~subliminal.subtitle.Subtitle`
:param bool single: save a single subtitle, default is to save one subtitle per language.
:param str directory: path to directory where to save the subtitles, default is next to the video.
:param str encoding: encoding in which to save the subtitles, default is to keep original encoding.
:return: the saved subtitles
:rtype: list of :class:`~subliminal.subtitle.Subtitle`
"""
saved_subtitles = []
for subtitle in subtitles:
# check content
if subtitle.content is None:
logger.error("Skipping subtitle %r: no content", subtitle)
continue
# check language
if subtitle.language in set(s.language for s in saved_subtitles):
logger.debug("Skipping subtitle %r: language already saved", subtitle)
continue
# create subtitle path
subtitle_path = get_subtitle_path(
video.name, None if single else subtitle.language
)
if directory is not None:
subtitle_path = os.path.join(directory, os.path.split(subtitle_path)[1])
# save content as is or in the specified encoding
logger.info("Saving %r to %r", subtitle, subtitle_path)
if encoding is None:
with io.open(subtitle_path, "wb") as f:
f.write(subtitle.content)
else:
with io.open(subtitle_path, "w", encoding=encoding) as f:
f.write(subtitle.text)
saved_subtitles.append(subtitle)
# check single
if single:
break
return saved_subtitles
|
def save_subtitles(video, subtitles, single=False, directory=None, encoding=None):
"""Save subtitles on filesystem.
Subtitles are saved in the order of the list. If a subtitle with a language has already been saved, other subtitles
with the same language are silently ignored.
The extension used is `.lang.srt` by default or `.srt` is `single` is `True`, with `lang` being the IETF code for
the :attr:`~subliminal.subtitle.Subtitle.language` of the subtitle.
:param video: video of the subtitles.
:type video: :class:`~subliminal.video.Video`
:param subtitles: subtitles to save.
:type subtitles: list of :class:`~subliminal.subtitle.Subtitle`
:param bool single: save a single subtitle, default is to save one subtitle per language.
:param str directory: path to directory where to save the subtitles, default is next to the video.
:param str encoding: encoding in which to save the subtitles, default is to keep original encoding.
:return: the saved subtitles
:rtype: list of :class:`~subliminal.subtitle.Subtitle`
"""
saved_subtitles = []
for subtitle in subtitles:
# check content
if subtitle.content is None:
logger.error("Skipping subtitle %r: no content", subtitle)
continue
# check language
if subtitle.language in set(s.language for s in saved_subtitles):
logger.debug("Skipping subtitle %r: language already saved", subtitle)
continue
# create subtitle path
subtitle_path = None
subtitle_path = get_subtitle_path(
video.name, None if single else subtitle.language
)
if directory is not None:
subtitle_path = os.path.join(directory, os.path.split(subtitle_path)[1])
# save content as is or in the specified encoding
logger.info("Saving %r to %r", subtitle, subtitle_path)
if encoding is None:
with io.open(subtitle_path, "wb") as f:
f.write(subtitle.content)
else:
with io.open(subtitle_path, "w", encoding=encoding) as f:
f.write(subtitle.text)
saved_subtitles.append(subtitle)
# check single
if single:
break
return [saved_subtitles, subtitle_path]
|
https://github.com/morpheus65535/bazarr/issues/158
|
Traceback (most recent call last):
File "c:\\bazarr\\get_subtitle.py", line 215, in manual_download_subtitle
result = save_subtitles(video, [best_subtitle], single=True, encoding=\utf-8\)
File "c:\\bazarr\\libs/subliminal\\core.py", line 771, in save_subtitles
return [saved_subtitles, subtitle_path]
UnboundLocalError: local variable \subtitle_path\ referenced before assignment
|
UnboundLocalError
|
def _get_filters_from_where_node(self, where_node, check_only=False):
# Check if this is a leaf node
if isinstance(where_node, Lookup):
if isinstance(where_node.lhs, ExtractDate):
if isinstance(where_node.lhs, ExtractYear):
field_attname = where_node.lhs.lhs.target.attname
else:
raise FilterError(
'Cannot apply filter on search results: "'
+ where_node.lhs.lookup_name
+ '" queries are not supported.'
)
else:
field_attname = where_node.lhs.target.attname
lookup = where_node.lookup_name
value = where_node.rhs
# Ignore pointer fields that show up in specific page type queries
if field_attname.endswith("_ptr_id"):
return
# Process the filter
return self._process_filter(field_attname, lookup, value, check_only=check_only)
elif isinstance(where_node, SubqueryConstraint):
raise FilterError(
"Could not apply filter on search results: Subqueries are not allowed."
)
elif isinstance(where_node, WhereNode):
# Get child filters
connector = where_node.connector
child_filters = [
self._get_filters_from_where_node(child) for child in where_node.children
]
if not check_only:
child_filters = [
child_filter for child_filter in child_filters if child_filter
]
return self._connect_filters(child_filters, connector, where_node.negated)
else:
raise FilterError(
"Could not apply filter on search results: Unknown where node: "
+ str(type(where_node))
)
|
def _get_filters_from_where_node(self, where_node, check_only=False):
# Check if this is a leaf node
if isinstance(where_node, Lookup):
field_attname = where_node.lhs.target.attname
lookup = where_node.lookup_name
value = where_node.rhs
# Ignore pointer fields that show up in specific page type queries
if field_attname.endswith("_ptr_id"):
return
# Process the filter
return self._process_filter(field_attname, lookup, value, check_only=check_only)
elif isinstance(where_node, SubqueryConstraint):
raise FilterError(
"Could not apply filter on search results: Subqueries are not allowed."
)
elif isinstance(where_node, WhereNode):
# Get child filters
connector = where_node.connector
child_filters = [
self._get_filters_from_where_node(child) for child in where_node.children
]
if not check_only:
child_filters = [
child_filter for child_filter in child_filters if child_filter
]
return self._connect_filters(child_filters, connector, where_node.negated)
else:
raise FilterError(
"Could not apply filter on search results: Unknown where node: "
+ str(type(where_node))
)
|
https://github.com/wagtail/wagtail/issues/5967
|
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/Users/gadirrustamli/anaconda/envs/wagtail-test/lib/python3.8/site-packages/wagtail/search/queryset.py", line 11, in search
return search_backend.search(query, self, fields=fields,
File "/Users/gadirrustamli/anaconda/envs/wagtail-test/lib/python3.8/site-packages/wagtail/search/backends/base.py", line 363, in search
return self._search(
File "/Users/gadirrustamli/anaconda/envs/wagtail-test/lib/python3.8/site-packages/wagtail/search/backends/base.py", line 358, in _search
search_query.check()
File "/Users/gadirrustamli/anaconda/envs/wagtail-test/lib/python3.8/site-packages/wagtail/search/backends/base.py", line 157, in check
self._get_filters_from_where_node(self.queryset.query.where, check_only=True)
File "/Users/gadirrustamli/anaconda/envs/wagtail-test/lib/python3.8/site-packages/wagtail/search/backends/base.py", line 108, in _get_filters_from_where_node
child_filters = [self._get_filters_from_where_node(child) for child in where_node.children]
File "/Users/gadirrustamli/anaconda/envs/wagtail-test/lib/python3.8/site-packages/wagtail/search/backends/base.py", line 108, in <listcomp>
child_filters = [self._get_filters_from_where_node(child) for child in where_node.children]
File "/Users/gadirrustamli/anaconda/envs/wagtail-test/lib/python3.8/site-packages/wagtail/search/backends/base.py", line 91, in _get_filters_from_where_node
field_attname = where_node.lhs.target.attname
AttributeError: 'ExtractYear' object has no attribute 'target'
|
AttributeError
|
def find_embed(self, url, max_width=None):
# Find provider
endpoint = self._get_endpoint(url)
if endpoint is None:
raise EmbedNotFoundException
# Work out params
params = self.options.copy()
params["url"] = url
params["format"] = "json"
if max_width:
params["maxwidth"] = max_width
# Perform request
request = Request(endpoint + "?" + urlencode(params))
request.add_header("User-agent", "Mozilla/5.0")
try:
r = urllib_request.urlopen(request)
oembed = json.loads(r.read().decode("utf-8"))
except (URLError, json.decoder.JSONDecodeError):
raise EmbedNotFoundException
# Convert photos into HTML
if oembed["type"] == "photo":
html = '<img src="%s" alt="">' % (oembed["url"],)
else:
html = oembed.get("html")
# Return embed as a dict
return {
"title": oembed["title"] if "title" in oembed else "",
"author_name": oembed["author_name"] if "author_name" in oembed else "",
"provider_name": oembed["provider_name"] if "provider_name" in oembed else "",
"type": oembed["type"],
"thumbnail_url": oembed.get("thumbnail_url"),
"width": oembed.get("width"),
"height": oembed.get("height"),
"html": html,
}
|
def find_embed(self, url, max_width=None):
# Find provider
endpoint = self._get_endpoint(url)
if endpoint is None:
raise EmbedNotFoundException
# Work out params
params = self.options.copy()
params["url"] = url
params["format"] = "json"
if max_width:
params["maxwidth"] = max_width
# Perform request
request = Request(endpoint + "?" + urlencode(params))
request.add_header("User-agent", "Mozilla/5.0")
try:
r = urllib_request.urlopen(request)
except URLError:
raise EmbedNotFoundException
oembed = json.loads(r.read().decode("utf-8"))
# Convert photos into HTML
if oembed["type"] == "photo":
html = '<img src="%s" alt="">' % (oembed["url"],)
else:
html = oembed.get("html")
# Return embed as a dict
return {
"title": oembed["title"] if "title" in oembed else "",
"author_name": oembed["author_name"] if "author_name" in oembed else "",
"provider_name": oembed["provider_name"] if "provider_name" in oembed else "",
"type": oembed["type"],
"thumbnail_url": oembed.get("thumbnail_url"),
"width": oembed.get("width"),
"height": oembed.get("height"),
"html": html,
}
|
https://github.com/wagtail/wagtail/issues/6646
|
Internal Server Error: /admin/pages/127028/edit/
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/django/core/handlers/exception.py", line 47, in inner
response = get_response(request)
File "/usr/local/lib/python3.8/dist-packages/django/core/handlers/base.py", line 179, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/usr/local/lib/python3.8/dist-packages/django/views/decorators/cache.py", line 44, in _wrapped_view_func
response = view_func(request, *args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/wagtail/admin/urls/__init__.py", line 127, in wrapper
return view_func(request, *args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/wagtail/admin/auth.py", line 193, in decorated_view
return view_func(request, *args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/django/views/generic/base.py", line 70, in view
return self.dispatch(request, *args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/wagtail/admin/views/pages/edit.py", line 131, in dispatch
return super().dispatch(request)
File "/usr/local/lib/python3.8/dist-packages/django/views/generic/base.py", line 98, in dispatch
return handler(request, *args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/wagtail/admin/views/pages/edit.py", line 217, in post
if self.form.is_valid() and not self.page_perms.page_locked():
File "/usr/local/lib/python3.8/dist-packages/modelcluster/forms.py", line 316, in is_valid
form_is_valid = super(ClusterForm, self).is_valid()
File "/usr/local/lib/python3.8/dist-packages/django/forms/forms.py", line 177, in is_valid
return self.is_bound and not self.errors
File "/usr/local/lib/python3.8/dist-packages/django/forms/forms.py", line 172, in errors
self.full_clean()
File "/usr/local/lib/python3.8/dist-packages/django/forms/forms.py", line 374, in full_clean
self._clean_fields()
File "/usr/local/lib/python3.8/dist-packages/django/forms/forms.py", line 392, in _clean_fields
value = field.clean(value)
File "/usr/local/lib/python3.8/dist-packages/wagtail/core/blocks/base.py", line 571, in clean
return self.block.clean(value)
File "/usr/local/lib/python3.8/dist-packages/wagtail/core/blocks/stream_block.py", line 202, in clean
(child.block.name, child.block.clean(child.value), child.id)
File "/usr/local/lib/python3.8/dist-packages/wagtail/core/blocks/struct_block.py", line 130, in clean
result.append((name, self.child_blocks[name].clean(val)))
File "/usr/local/lib/python3.8/dist-packages/wagtail/embeds/blocks.py", line 69, in clean
if isinstance(value, EmbedValue) and not value.html:
File "/usr/local/lib/python3.8/dist-packages/django/utils/functional.py", line 48, in __get__
res = instance.__dict__[self.name] = self.func(instance)
File "/usr/local/lib/python3.8/dist-packages/wagtail/embeds/blocks.py", line 22, in html
return embed_to_frontend_html(self.url)
File "/usr/local/lib/python3.8/dist-packages/wagtail/embeds/format.py", line 9, in embed_to_frontend_html
embed = embeds.get_embed(url)
File "/usr/local/lib/python3.8/dist-packages/wagtail/embeds/embeds.py", line 24, in get_embed
embed_dict = finder(url, max_width)
File "/usr/local/lib/python3.8/dist-packages/wagtail/embeds/embeds.py", line 20, in finder
return finder.find_embed(url, max_width=max_width)
File "/usr/local/lib/python3.8/dist-packages/wagtail/embeds/finders/oembed.py", line 65, in find_embed
oembed = json.loads(r.read().decode('utf-8'))
File "/usr/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
|
json.decoder.JSONDecodeError
|
def handle(self, *args, **options):
current_page_id = None
missing_models_content_type_ids = set()
for revision in (
PageRevision.objects.order_by("page_id", "created_at")
.select_related("page")
.iterator()
):
# This revision is for a page type that is no longer in the database. Bail out early.
if revision.page.content_type_id in missing_models_content_type_ids:
continue
if not revision.page.specific_class:
missing_models_content_type_ids.add(revision.page.content_type_id)
continue
is_new_page = revision.page_id != current_page_id
if is_new_page:
# reset previous revision when encountering a new page.
previous_revision = None
has_content_changes = False
current_page_id = revision.page_id
if not PageLogEntry.objects.filter(revision=revision).exists():
try:
current_revision_as_page = revision.as_page_object()
except Exception:
# restoring old revisions may fail if e.g. they have an on_delete=PROTECT foreign key
# to a no-longer-existing model instance. We cannot compare changes between two
# non-restorable revisions, although we can at least infer that there was a content
# change at the point that it went from restorable to non-restorable or vice versa.
current_revision_as_page = None
published = revision.id == revision.page.live_revision_id
if previous_revision is not None:
try:
previous_revision_as_page = previous_revision.as_page_object()
except Exception:
previous_revision_as_page = None
if (
previous_revision_as_page is None
and current_revision_as_page is None
):
# both revisions failed to restore - unable to determine presence of content changes
has_content_changes = False
elif (
previous_revision_as_page is None
or current_revision_as_page is None
):
# one or the other revision failed to restore, which indicates a content change
has_content_changes = True
else:
# Must use .specific so the comparison picks up all fields, not just base Page ones.
comparison = get_comparison(
revision.page.specific,
previous_revision_as_page,
current_revision_as_page,
)
has_content_changes = len(comparison) > 0
if (
current_revision_as_page is not None
and current_revision_as_page.live_revision_id
== previous_revision.id
):
# Log the previous revision publishing.
self.log_page_action("wagtail.publish", previous_revision, True)
if is_new_page or has_content_changes or published:
if is_new_page:
action = "wagtail.create"
elif published:
action = "wagtail.publish"
else:
action = "wagtail.edit"
if published and has_content_changes:
# When publishing, also log the 'draft save', but only if there have been content changes
self.log_page_action("wagtail.edit", revision, has_content_changes)
self.log_page_action(action, revision, has_content_changes)
previous_revision = revision
|
def handle(self, *args, **options):
current_page_id = None
missing_models_content_type_ids = set()
for revision in (
PageRevision.objects.order_by("page_id", "created_at")
.select_related("page")
.iterator()
):
# This revision is for a page type that is no longer in the database. Bail out early.
if revision.page.content_type_id in missing_models_content_type_ids:
continue
if not revision.page.specific_class:
missing_models_content_type_ids.add(revision.page.content_type_id)
continue
is_new_page = revision.page_id != current_page_id
if is_new_page:
# reset previous revision when encountering a new page.
previous_revision = None
has_content_changes = False
current_page_id = revision.page_id
if not PageLogEntry.objects.filter(revision=revision).exists():
try:
current_revision_as_page = revision.as_page_object()
except Exception:
# restoring old revisions may fail if e.g. they have an on_delete=PROTECT foreign key
# to a no-longer-existing model instance. We cannot compare changes between two
# non-restorable revisions, although we can at least infer that there was a content
# change at the point that it went from restorable to non-restorable or vice versa.
current_revision_as_page = None
published = revision.id == revision.page.live_revision_id
if previous_revision is not None:
try:
previous_revision_as_page = previous_revision.as_page_object()
except Exception:
previous_revision_as_page = None
if (
previous_revision_as_page is None
and current_revision_as_page is None
):
# both revisions failed to restore - unable to determine presence of content changes
has_content_changes = False
elif (
previous_revision_as_page is None
or current_revision_as_page is None
):
# one or the other revision failed to restore, which indicates a content change
has_content_changes = True
else:
# Must use .specific so the comparison picks up all fields, not just base Page ones.
comparison = get_comparison(
revision.page.specific,
previous_revision_as_page,
current_revision_as_page,
)
has_content_changes = len(comparison) > 0
if current_revision_as_page.live_revision_id == previous_revision.id:
# Log the previous revision publishing.
self.log_page_action("wagtail.publish", previous_revision, True)
if is_new_page or has_content_changes or published:
if is_new_page:
action = "wagtail.create"
elif published:
action = "wagtail.publish"
else:
action = "wagtail.edit"
if published and has_content_changes:
# When publishing, also log the 'draft save', but only if there have been content changes
self.log_page_action("wagtail.edit", revision, has_content_changes)
self.log_page_action(action, revision, has_content_changes)
previous_revision = revision
|
https://github.com/wagtail/wagtail/issues/6498
|
$ python manage.py create_log_entries_from_revisions
Traceback (most recent call last):
File "manage.py", line 12, in <module>
execute_from_command_line(sys.argv)
File "/lib/python3.6/site-packages/django/core/management/__init__.py", line 381, in execute_from_command_line
utility.execute()
File "/lib/python3.6/site-packages/django/core/management/__init__.py", line 375, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/lib/python3.6/site-packages/django/core/management/base.py", line 323, in run_from_argv
self.execute(*args, **cmd_options)
File "/lib/python3.6/site-packages/django/core/management/base.py", line 364, in execute
output = self.handle(*args, **options)
File "/lib/python3.6/site-packages/wagtail/core/management/commands/create_log_entries_from_revisions.py", line 62, in handle
if current_revision_as_page.live_revision_id == previous_revision.id:
AttributeError: 'NoneType' object has no attribute 'live_revision_id'
|
AttributeError
|
def can_delete(self, locale):
if not self.queryset.exclude(pk=locale.pk).exists():
self.cannot_delete_message = gettext_lazy(
"This locale cannot be deleted because there are no other locales."
)
return False
if get_locale_usage(locale) != (0, 0):
self.cannot_delete_message = gettext_lazy(
"This locale cannot be deleted because there are pages and/or other objects using it."
)
return False
return True
|
def can_delete(self, locale):
return get_locale_usage(locale) == (0, 0)
|
https://github.com/wagtail/wagtail/issues/6533
|
Internal Server Error: /locales/1/delete/
Traceback (most recent call last):
File "C:\Work\virtual_env\lib\site-packages\django\core\handlers\exception.py", line 47, in inner
response = get_response(request)
File "C:\Work\virtual_env\lib\site-packages\django\core\handlers\base.py", line 179, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "C:\Work\virtual_env\lib\site-packages\django\views\decorators\cache.py", line 44, in _wrapped_view_func
response = view_func(request, *args, **kwargs)
File "C:\Work\virtual_env\lib\site-packages\wagtail\admin\urls\__init__.py", line 127, in wrapper
return view_func(request, *args, **kwargs)
File "C:\Work\virtual_env\lib\site-packages\wagtail\admin\auth.py", line 193, in decorated_view
return view_func(request, *args, **kwargs)
File "C:\Work\virtual_env\lib\site-packages\django\views\generic\base.py", line 70, in view
return self.dispatch(request, *args, **kwargs)
File "C:\Work\virtual_env\lib\site-packages\wagtail\admin\views\generic.py", line 45, in dispatch
return super().dispatch(request, *args, **kwargs)
File "C:\Work\virtual_env\lib\site-packages\django\views\generic\base.py", line 98, in dispatch
return handler(request, *args, **kwargs)
File "C:\Work\virtual_env\lib\site-packages\django\views\generic\edit.py", line 218, in post
return self.delete(request, *args, **kwargs)
File "C:\Work\virtual_env\lib\site-packages\wagtail_localize\locales\views.py", line 62, in delete
return super().delete(request, *args, **kwargs)
File "C:\Work\virtual_env\lib\site-packages\wagtail\admin\views\generic.py", line 242, in delete
response = super().delete(request, *args, **kwargs)
File "C:\Work\virtual_env\lib\site-packages\django\views\generic\edit.py", line 213, in delete
self.object.delete()
File "C:\Work\virtual_env\lib\site-packages\django\db\models\base.py", line 946, in delete
collector.collect([self], keep_parents=keep_parents)
File "C:\Work\virtual_env\lib\site-packages\django\db\models\deletion.py", line 308, in collect
set(chain.from_iterable(protected_objects.values())),
django.db.models.deletion.ProtectedError: ("Cannot delete some instances of model 'Locale' because they are referenced through protected foreign keys: 'Page.locale'.", {<Page: Root>})
|
django.db.models.deletion.ProtectedError
|
def get_content_languages():
"""
Cache of settings.WAGTAIL_CONTENT_LANGUAGES in a dictionary for easy lookups by key.
"""
content_languages = getattr(settings, "WAGTAIL_CONTENT_LANGUAGES", None)
languages = dict(settings.LANGUAGES)
if content_languages is None:
# Default to a single language based on LANGUAGE_CODE
default_language_code = get_supported_language_variant(settings.LANGUAGE_CODE)
try:
language_name = languages[default_language_code]
except KeyError:
# get_supported_language_variant on the 'null' translation backend (used for
# USE_I18N=False) returns settings.LANGUAGE_CODE unchanged without accounting for
# language variants (en-us versus en), so retry with the generic version.
default_language_code = default_language_code.split("-")[0]
try:
language_name = languages[default_language_code]
except KeyError:
# Can't extract a display name, so fall back on displaying LANGUAGE_CODE instead
language_name = settings.LANGUAGE_CODE
# Also need to tweak the languages dict to get around the check below
languages[default_language_code] = settings.LANGUAGE_CODE
content_languages = [
(default_language_code, language_name),
]
# Check that each content language is in LANGUAGES
for language_code, name in content_languages:
if language_code not in languages:
raise ImproperlyConfigured(
"The language {} is specified in WAGTAIL_CONTENT_LANGUAGES but not LANGUAGES. "
"WAGTAIL_CONTENT_LANGUAGES must be a subset of LANGUAGES.".format(
language_code
)
)
return dict(content_languages)
|
def get_content_languages():
"""
Cache of settings.WAGTAIL_CONTENT_LANGUAGES in a dictionary for easy lookups by key.
"""
content_languages = getattr(settings, "WAGTAIL_CONTENT_LANGUAGES", None)
languages = dict(settings.LANGUAGES)
if content_languages is None:
# Default to a single language based on LANGUAGE_CODE
default_language_code = get_supported_language_variant(settings.LANGUAGE_CODE)
content_languages = [
(default_language_code, languages[default_language_code]),
]
# Check that each content language is in LANGUAGES
for language_code, name in content_languages:
if language_code not in languages:
raise ImproperlyConfigured(
"The language {} is specified in WAGTAIL_CONTENT_LANGUAGES but not LANGUAGES. "
"WAGTAIL_CONTENT_LANGUAGES must be a subset of LANGUAGES.".format(
language_code
)
)
return dict(content_languages)
|
https://github.com/wagtail/wagtail/issues/6539
|
Traceback (most recent call last):
File "./manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/django/core/management/__init__.py", line 401, in execute_from_command_line
utility.execute()
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/django/core/management/__init__.py", line 395, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/django/core/management/base.py", line 330, in run_from_argv
self.execute(*args, **cmd_options)
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/django/core/management/base.py", line 371, in execute
output = self.handle(*args, **options)
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/django/core/management/base.py", line 85, in wrapped
res = handle_func(*args, **kwargs)
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/django/core/management/commands/migrate.py", line 92, in handle
executor = MigrationExecutor(connection, self.migration_progress_callback)
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/django/db/migrations/executor.py", line 18, in __init__
self.loader = MigrationLoader(self.connection)
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/django/db/migrations/loader.py", line 53, in __init__
self.build_graph()
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/django/db/migrations/loader.py", line 210, in build_graph
self.load_disk()
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/django/db/migrations/loader.py", line 112, in load_disk
migration_module = import_module(migration_path)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/importlib/__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1006, in _gcd_import
File "<frozen importlib._bootstrap>", line 983, in _find_and_load
File "<frozen importlib._bootstrap>", line 967, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 677, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 728, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/wagtail/core/migrations/0056_page_locale_fields_populate.py", line 8, in <module>
class Migration(migrations.Migration):
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/wagtail/core/migrations/0056_page_locale_fields_populate.py", line 15, in Migration
BootstrapTranslatableModel('wagtailcore.Page'),
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/wagtail/core/models.py", line 538, in __init__
language_code = get_supported_content_language_variant(settings.LANGUAGE_CODE)
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/wagtail/core/utils.py", line 261, in get_supported_content_language_variant
supported_lang_codes = get_content_languages()
File "/Users/matthew/.virtualenvs/use18n/lib/python3.7/site-packages/wagtail/core/utils.py", line 224, in get_content_languages
(default_language_code, languages[default_language_code]),
KeyError: 'en-us'
|
KeyError
|
def get_active(cls):
"""
Returns the Locale that corresponds to the currently activated language in Django.
"""
try:
return cls.objects.get_for_language(translation.get_language())
except (cls.DoesNotExist, LookupError):
return cls.get_default()
|
def get_active(cls):
"""
Returns the Locale that corresponds to the currently activated language in Django.
"""
try:
return cls.objects.get_for_language(translation.get_language())
except cls.DoesNotExist:
return cls.get_default()
|
https://github.com/wagtail/wagtail/issues/6540
|
Traceback (most recent call last):
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/django/core/handlers/exception.py", line 47, in inner
response = get_response(request)
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/django/core/handlers/base.py", line 179, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/views.py", line 17, in serve
page, args, kwargs = site.root_page.localized.specific.route(request, path_components)
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/models.py", line 1229, in localized
localized = self.localized_draft
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/models.py", line 1212, in localized_draft
locale = Locale.get_active()
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/models.py", line 372, in get_active
return cls.objects.get_for_language(translation.get_language())
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/models.py", line 339, in get_for_language
return self.get(language_code=get_supported_content_language_variant(language_code))
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/utils.py", line 271, in get_supported_content_language_variant
raise LookupError(lang_code)
LookupError: de
|
LookupError
|
def localized(self):
"""
Finds the translation in the current active language.
If there is no translation in the active language, self is returned.
"""
try:
locale = Locale.get_active()
except (LookupError, Locale.DoesNotExist):
return self
if locale.id == self.locale_id:
return self
return self.get_translation_or_none(locale) or self
|
def localized(self):
"""
Finds the translation in the current active language.
If there is no translation in the active language, self is returned.
"""
locale = Locale.get_active()
if locale.id == self.locale_id:
return self
return self.get_translation_or_none(locale) or self
|
https://github.com/wagtail/wagtail/issues/6540
|
Traceback (most recent call last):
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/django/core/handlers/exception.py", line 47, in inner
response = get_response(request)
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/django/core/handlers/base.py", line 179, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/views.py", line 17, in serve
page, args, kwargs = site.root_page.localized.specific.route(request, path_components)
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/models.py", line 1229, in localized
localized = self.localized_draft
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/models.py", line 1212, in localized_draft
locale = Locale.get_active()
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/models.py", line 372, in get_active
return cls.objects.get_for_language(translation.get_language())
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/models.py", line 339, in get_for_language
return self.get(language_code=get_supported_content_language_variant(language_code))
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/utils.py", line 271, in get_supported_content_language_variant
raise LookupError(lang_code)
LookupError: de
|
LookupError
|
def localized_draft(self):
"""
Finds the translation in the current active language.
If there is no translation in the active language, self is returned.
Note: This will return translations that are in draft. If you want to exclude
these, use the ``.localized`` attribute.
"""
try:
locale = Locale.get_active()
except (LookupError, Locale.DoesNotExist):
return self
if locale.id == self.locale_id:
return self
return self.get_translation_or_none(locale) or self
|
def localized_draft(self):
"""
Finds the translation in the current active language.
If there is no translation in the active language, self is returned.
Note: This will return translations that are in draft. If you want to exclude
these, use the ``.localized`` attribute.
"""
locale = Locale.get_active()
if locale.id == self.locale_id:
return self
return self.get_translation_or_none(locale) or self
|
https://github.com/wagtail/wagtail/issues/6540
|
Traceback (most recent call last):
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/django/core/handlers/exception.py", line 47, in inner
response = get_response(request)
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/django/core/handlers/base.py", line 179, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/views.py", line 17, in serve
page, args, kwargs = site.root_page.localized.specific.route(request, path_components)
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/models.py", line 1229, in localized
localized = self.localized_draft
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/models.py", line 1212, in localized_draft
locale = Locale.get_active()
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/models.py", line 372, in get_active
return cls.objects.get_for_language(translation.get_language())
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/models.py", line 339, in get_for_language
return self.get(language_code=get_supported_content_language_variant(language_code))
File "/home/sef/.virtualenvs/wagtail-bugreport-u47kcAMn/lib/python3.8/site-packages/wagtail/core/utils.py", line 271, in get_supported_content_language_variant
raise LookupError(lang_code)
LookupError: de
|
LookupError
|
def get_url_parts(self, request=None):
"""
Determine the URL for this page and return it as a tuple of
``(site_id, site_root_url, page_url_relative_to_site_root)``.
Return None if the page is not routable.
This is used internally by the ``full_url``, ``url``, ``relative_url``
and ``get_site`` properties and methods; pages with custom URL routing
should override this method in order to have those operations return
the custom URLs.
Accepts an optional keyword argument ``request``, which may be used
to avoid repeated database / cache lookups. Typically, a page model
that overrides ``get_url_parts`` should not need to deal with
``request`` directly, and should just pass it to the original method
when calling ``super``.
"""
possible_sites = [
(pk, path, url, language_code)
for pk, path, url, language_code in self._get_site_root_paths(request)
if self.url_path.startswith(path)
]
if not possible_sites:
return None
site_id, root_path, root_url, language_code = possible_sites[0]
site = Site.find_for_request(request)
if site:
for site_id, root_path, root_url, language_code in possible_sites:
if site_id == site.pk:
break
else:
site_id, root_path, root_url, language_code = possible_sites[0]
use_wagtail_i18n = getattr(settings, "WAGTAIL_I18N_ENABLED", False)
if use_wagtail_i18n:
# If the active language code is a variant of the page's language, then
# use that instead
# This is used when LANGUAGES contain more languages than WAGTAIL_CONTENT_LANGUAGES
try:
if (
get_supported_content_language_variant(translation.get_language())
== language_code
):
language_code = translation.get_language()
except LookupError:
# active language code is not a recognised content language, so leave
# page's language code unchanged
pass
# The page may not be routable because wagtail_serve is not registered
# This may be the case if Wagtail is used headless
try:
if use_wagtail_i18n:
with translation.override(language_code):
page_path = reverse(
"wagtail_serve", args=(self.url_path[len(root_path) :],)
)
else:
page_path = reverse(
"wagtail_serve", args=(self.url_path[len(root_path) :],)
)
except NoReverseMatch:
return (site_id, None, None)
# Remove the trailing slash from the URL reverse generates if
# WAGTAIL_APPEND_SLASH is False and we're not trying to serve
# the root path
if not WAGTAIL_APPEND_SLASH and page_path != "/":
page_path = page_path.rstrip("/")
return (site_id, root_url, page_path)
|
def get_url_parts(self, request=None):
"""
Determine the URL for this page and return it as a tuple of
``(site_id, site_root_url, page_url_relative_to_site_root)``.
Return None if the page is not routable.
This is used internally by the ``full_url``, ``url``, ``relative_url``
and ``get_site`` properties and methods; pages with custom URL routing
should override this method in order to have those operations return
the custom URLs.
Accepts an optional keyword argument ``request``, which may be used
to avoid repeated database / cache lookups. Typically, a page model
that overrides ``get_url_parts`` should not need to deal with
``request`` directly, and should just pass it to the original method
when calling ``super``.
"""
possible_sites = [
(pk, path, url, language_code)
for pk, path, url, language_code in self._get_site_root_paths(request)
if self.url_path.startswith(path)
]
if not possible_sites:
return None
site_id, root_path, root_url, language_code = possible_sites[0]
site = Site.find_for_request(request)
if site:
for site_id, root_path, root_url, language_code in possible_sites:
if site_id == site.pk:
break
else:
site_id, root_path, root_url, language_code = possible_sites[0]
# If the active language code is a variant of the page's language, then
# use that instead
# This is used when LANGUAGES contain more languages than WAGTAIL_CONTENT_LANGUAGES
if (
get_supported_content_language_variant(translation.get_language())
== language_code
):
language_code = translation.get_language()
# The page may not be routable because wagtail_serve is not registered
# This may be the case if Wagtail is used headless
try:
with translation.override(language_code):
page_path = reverse(
"wagtail_serve", args=(self.url_path[len(root_path) :],)
)
except NoReverseMatch:
return (site_id, None, None)
# Remove the trailing slash from the URL reverse generates if
# WAGTAIL_APPEND_SLASH is False and we're not trying to serve
# the root path
if not WAGTAIL_APPEND_SLASH and page_path != "/":
page_path = page_path.rstrip("/")
return (site_id, root_url, page_path)
|
https://github.com/wagtail/wagtail/issues/6511
|
Internal Server Error: /admin/api/main/pages/
Traceback (most recent call last):
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/django/core/handlers/exception.py", line 47, in inner
response = get_response(request)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/django/core/handlers/base.py", line 179, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/django/views/decorators/cache.py", line 44, in _wrapped_view_func
response = view_func(request, *args, **kwargs)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/wagtail/admin/urls/__init__.py", line 127, in wrapper
return view_func(request, *args, **kwargs)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/wagtail/admin/auth.py", line 170, in decorated_view
response = view_func(request, *args, **kwargs)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/wagtail/api/v2/router.py", line 63, in wrapped
return func(request, *args, **kwargs)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/django/views/decorators/csrf.py", line 54, in wrapped_view
return view_func(*args, **kwargs)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/rest_framework/viewsets.py", line 125, in view
return self.dispatch(request, *args, **kwargs)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/rest_framework/views.py", line 509, in dispatch
response = self.handle_exception(exc)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/wagtail/api/v2/views.py", line 116, in handle_exception
return super().handle_exception(exc)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/rest_framework/views.py", line 469, in handle_exception
self.raise_uncaught_exception(exc)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/rest_framework/views.py", line 480, in raise_uncaught_exception
raise exc
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/rest_framework/views.py", line 506, in dispatch
response = handler(request, *args, **kwargs)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/wagtail/admin/api/views.py", line 98, in listing_view
response = super().listing_view(request)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/wagtail/api/v2/views.py", line 74, in listing_view
return self.get_paginated_response(serializer.data)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/rest_framework/serializers.py", line 745, in data
ret = super().data
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/rest_framework/serializers.py", line 246, in data
self._data = self.to_representation(self.instance)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/rest_framework/serializers.py", line 663, in to_representation
return [
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/rest_framework/serializers.py", line 664, in <listcomp>
self.child.to_representation(item) for item in iterable
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/wagtail/api/v2/serializers.py", line 287, in to_representation
meta[field.field_name] = field.to_representation(attribute)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/wagtail/api/v2/serializers.py", line 62, in to_representation
return page.full_url
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/wagtail/core/models.py", line 1609, in get_full_url
url_parts = self.get_url_parts(request=request)
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/wagtail/core/models.py", line 1587, in get_url_parts
if get_supported_content_language_variant(translation.get_language()) == language_code:
File "/Users/matthew/.virtualenvs/langwithoutwagtail/lib/python3.9/site-packages/wagtail/core/utils.py", line 271, in get_supported_content_language_variant
raise LookupError(lang_code)
LookupError: fr
|
LookupError
|
def handle(self, *args, **options):
current_page_id = None
missing_models_content_type_ids = set()
for revision in (
PageRevision.objects.order_by("page_id", "created_at")
.select_related("page")
.iterator()
):
# This revision is for a page type that is no longer in the database. Bail out early.
if revision.page.content_type_id in missing_models_content_type_ids:
continue
if not revision.page.specific_class:
missing_models_content_type_ids.add(revision.page.content_type_id)
continue
is_new_page = revision.page_id != current_page_id
if is_new_page:
# reset previous revision when encountering a new page.
previous_revision = None
has_content_changes = False
current_page_id = revision.page_id
if not PageLogEntry.objects.filter(revision=revision).exists():
try:
current_revision_as_page = revision.as_page_object()
except Exception:
# restoring old revisions may fail if e.g. they have an on_delete=PROTECT foreign key
# to a no-longer-existing model instance. We cannot compare changes between two
# non-restorable revisions, although we can at least infer that there was a content
# change at the point that it went from restorable to non-restorable or vice versa.
current_revision_as_page = None
published = revision.id == revision.page.live_revision_id
if previous_revision is not None:
try:
previous_revision_as_page = previous_revision.as_page_object()
except Exception:
previous_revision_as_page = None
if (
previous_revision_as_page is None
and current_revision_as_page is None
):
# both revisions failed to restore - unable to determine presence of content changes
has_content_changes = False
elif (
previous_revision_as_page is None
or current_revision_as_page is None
):
# one or the other revision failed to restore, which indicates a content change
has_content_changes = True
else:
# Must use .specific so the comparison picks up all fields, not just base Page ones.
comparison = get_comparison(
revision.page.specific,
previous_revision_as_page,
current_revision_as_page,
)
has_content_changes = len(comparison) > 0
if current_revision_as_page.live_revision_id == previous_revision.id:
# Log the previous revision publishing.
self.log_page_action("wagtail.publish", previous_revision, True)
if is_new_page or has_content_changes or published:
if is_new_page:
action = "wagtail.create"
elif published:
action = "wagtail.publish"
else:
action = "wagtail.edit"
if published and has_content_changes:
# When publishing, also log the 'draft save', but only if there have been content changes
self.log_page_action("wagtail.edit", revision, has_content_changes)
self.log_page_action(action, revision, has_content_changes)
previous_revision = revision
|
def handle(self, *args, **options):
current_page_id = None
missing_models_content_type_ids = set()
for revision in (
PageRevision.objects.order_by("page_id", "created_at")
.select_related("page")
.iterator()
):
# This revision is for a page type that is no longer in the database. Bail out early.
if revision.page.content_type_id in missing_models_content_type_ids:
continue
if not revision.page.specific_class:
missing_models_content_type_ids.add(revision.page.content_type_id)
continue
is_new_page = revision.page_id != current_page_id
if is_new_page:
# reset previous revision when encountering a new page.
previous_revision = None
has_content_changes = False
current_page_id = revision.page_id
if not PageLogEntry.objects.filter(revision=revision).exists():
current_revision_as_page = revision.as_page_object()
published = revision.id == revision.page.live_revision_id
if previous_revision is not None:
# Must use .specific so the comparison picks up all fields, not just base Page ones.
comparison = get_comparison(
revision.page.specific,
previous_revision.as_page_object(),
current_revision_as_page,
)
has_content_changes = len(comparison) > 0
if current_revision_as_page.live_revision_id == previous_revision.id:
# Log the previous revision publishing.
self.log_page_action("wagtail.publish", previous_revision, True)
if is_new_page or has_content_changes or published:
if is_new_page:
action = "wagtail.create"
elif published:
action = "wagtail.publish"
else:
action = "wagtail.edit"
if published and has_content_changes:
# When publishing, also log the 'draft save', but only if there have been content changes
self.log_page_action("wagtail.edit", revision, has_content_changes)
self.log_page_action(action, revision, has_content_changes)
previous_revision = revision
|
https://github.com/wagtail/wagtail/issues/6368
|
Traceback (most recent call last):
File "./cfgov/manage.py", line 11, in <module>
execute_from_command_line(sys.argv)
File "/Users/higginsw/.pyenv/versions/c15gov/lib/python3.6/site-packages/django/core/management/__init__.py", line 381, in execute_from_command_line
utility.execute()
File "/Users/higginsw/.pyenv/versions/c15gov/lib/python3.6/site-packages/django/core/management/__init__.py", line 375, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/Users/higginsw/.pyenv/versions/c15gov/lib/python3.6/site-packages/django/core/management/base.py", line 323, in run_from_argv
self.execute(*args, **cmd_options)
File "/Users/higginsw/.pyenv/versions/c15gov/lib/python3.6/site-packages/django/core/management/base.py", line 364, in execute
output = self.handle(*args, **options)
File "/Users/higginsw/.pyenv/versions/c15gov/lib/python3.6/site-packages/wagtail/core/management/commands/create_log_entries_from_revisions.py", line 34, in handle
current_revision_as_page = revision.as_page_object()
File "/Users/higginsw/.pyenv/versions/c15gov/lib/python3.6/site-packages/wagtail/core/models.py", line 1977, in as_page_object
return self.page.specific.with_content_json(self.content_json)
File "/Users/higginsw/.pyenv/versions/c15gov/lib/python3.6/site-packages/wagtail/core/models.py", line 1830, in with_content_json
obj = self.specific_class.from_json(content_json)
File "/Users/higginsw/.pyenv/versions/c15gov/lib/python3.6/site-packages/modelcluster/models.py", line 272, in from_json
return cls.from_serializable_data(json.loads(json_data), check_fks=check_fks, strict_fks=strict_fks)
File "/Users/higginsw/.pyenv/versions/c15gov/lib/python3.6/site-packages/modelcluster/models.py", line 239, in from_serializable_data
obj = model_from_serializable_data(cls, data, check_fks=check_fks, strict_fks=strict_fks)
File "/Users/higginsw/.pyenv/versions/c15gov/lib/python3.6/site-packages/modelcluster/models.py", line 100, in model_from_serializable_data
raise Exception("can't currently handle on_delete types other than CASCADE, SET_NULL and DO_NOTHING")
Exception: can't currently handle on_delete types other than CASCADE, SET_NULL and DO_NOTHING
|
Exception
|
def timesince_last_update(last_update, time_prefix="", use_shorthand=True):
"""
Returns:
- the time of update if last_update is today, if any prefix is supplied, the output will use it
- time since last update othewise. Defaults to the simplified timesince,
but can return the full string if needed
"""
if last_update.date() == datetime.today().date():
if timezone.is_aware(last_update):
time_str = timezone.localtime(last_update).strftime("%H:%M")
else:
time_str = last_update.strftime("%H:%M")
return (
time_str
if not time_prefix
else "%(prefix)s %(formatted_time)s"
% {"prefix": time_prefix, "formatted_time": time_str}
)
else:
if use_shorthand:
return timesince_simple(last_update)
return _("%(time_period)s ago") % {"time_period": timesince(last_update)}
|
def timesince_last_update(last_update, time_prefix="", use_shorthand=True):
"""
Returns:
- the time of update if last_update is today, if any prefix is supplied, the output will use it
- time since last update othewise. Defaults to the simplified timesince,
but can return the full string if needed
"""
if last_update.date() == datetime.today().date():
time_str = timezone.localtime(last_update).strftime("%H:%M")
return (
time_str
if not time_prefix
else "%(prefix)s %(formatted_time)s"
% {"prefix": time_prefix, "formatted_time": time_str}
)
else:
if use_shorthand:
return timesince_simple(last_update)
return _("%(time_period)s ago") % {"time_period": timesince(last_update)}
|
https://github.com/wagtail/wagtail/issues/6345
|
Environment:
Request Method: GET
Request URL: http://127.0.0.1:8000/admin/pages/3/edit/
Django Version: 3.1
Python Version: 3.8.3
Installed Applications:
['home',
'search',
'wagtail.contrib.forms',
'wagtail.contrib.redirects',
'wagtail.embeds',
'wagtail.sites',
'wagtail.users',
'wagtail.snippets',
'wagtail.documents',
'wagtail.images',
'wagtail.search',
'wagtail.admin',
'wagtail.core',
'modelcluster',
'taggit',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles']
Installed Middleware:
['django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'wagtail.contrib.redirects.middleware.RedirectMiddleware']
Template error:
In template /Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/wagtail/admin/templates/wagtailadmin/shared/last_updated.html, error at line 4
localtime() cannot be applied to a naive datetime
1 : {% load i18n wagtailadmin_tags %}
2 : {% if last_updated %}
3 : <span title="{{ last_updated }}" data-wagtail-tooltip="{{ last_updated }}" {% if classname %}class="{{ classname }}"{% endif %}>
4 : {% if since_text %}{{ since_text }}{% endif %} {% timesince_last_update last_updated time_prefix=time_prefix %}
5 : </span>
6 : {% endif %}
7 :
Traceback (most recent call last):
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/core/handlers/exception.py", line 47, in inner
response = get_response(request)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/core/handlers/base.py", line 202, in _get_response
response = response.render()
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/response.py", line 105, in render
self.content = self.rendered_content
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/response.py", line 83, in rendered_content
return template.render(context, self._request)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/backends/django.py", line 61, in render
return self.template.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 170, in render
return self._render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 162, in _render
return self.nodelist.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 938, in render
bit = node.render_annotated(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 905, in render_annotated
return self.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/loader_tags.py", line 150, in render
return compiled_parent._render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 162, in _render
return self.nodelist.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 938, in render
bit = node.render_annotated(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 905, in render_annotated
return self.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/loader_tags.py", line 150, in render
return compiled_parent._render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 162, in _render
return self.nodelist.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 938, in render
bit = node.render_annotated(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 905, in render_annotated
return self.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/loader_tags.py", line 150, in render
return compiled_parent._render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 162, in _render
return self.nodelist.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 938, in render
bit = node.render_annotated(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 905, in render_annotated
return self.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/loader_tags.py", line 62, in render
result = block.nodelist.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 938, in render
bit = node.render_annotated(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 905, in render_annotated
return self.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/loader_tags.py", line 62, in render
result = block.nodelist.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 938, in render
bit = node.render_annotated(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 905, in render_annotated
return self.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/loader_tags.py", line 192, in render
return template.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 172, in render
return self._render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 162, in _render
return self.nodelist.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 938, in render
bit = node.render_annotated(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 905, in render_annotated
return self.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/defaulttags.py", line 312, in render
return nodelist.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 938, in render
bit = node.render_annotated(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 905, in render_annotated
return self.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/defaulttags.py", line 312, in render
return nodelist.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 938, in render
bit = node.render_annotated(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 905, in render_annotated
return self.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/loader_tags.py", line 192, in render
return template.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 172, in render
return self._render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 162, in _render
return self.nodelist.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 938, in render
bit = node.render_annotated(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 905, in render_annotated
return self.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/defaulttags.py", line 312, in render
return nodelist.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 938, in render
bit = node.render_annotated(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/base.py", line 905, in render_annotated
return self.render(context)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/template/library.py", line 192, in render
output = self.func(*resolved_args, **resolved_kwargs)
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/wagtail/admin/templatetags/wagtailadmin_tags.py", line 587, in timesince_last_update
time_str = timezone.localtime(last_update).strftime("%H:%M")
File "/Users/stefan/.virtualenvs/test-wagtail/lib/python3.8/site-packages/django/utils/timezone.py", line 171, in localtime
raise ValueError("localtime() cannot be applied to a naive datetime")
Exception Type: ValueError at /admin/pages/3/edit/
Exception Value: localtime() cannot be applied to a naive datetime
|
ValueError
|
def __get__(self, obj, type=None):
if obj is None:
return self
field_name = self.field.name
if field_name not in obj.__dict__:
# Field is deferred. Fetch it from db.
obj.refresh_from_db(fields=[field_name])
return obj.__dict__[field_name]
|
def __get__(self, obj, type=None):
if obj is None:
return self
return obj.__dict__[self.field.name]
|
https://github.com/wagtail/wagtail/issues/5537
|
(Pdb++) page
<SimplePage: test>
(Pdb++) page.content
*** KeyError: 'content'
Traceback (most recent call last):
File "/home/myproject/.local/share/virtualenvs/app-4PlAip0Q/lib/python3.7/site-packages/wagtail/core/fields.py", line 34, in __get__
return obj.__dict__[self.field.name]
(Pdb++) page._meta.model.objects.get(pk=page.pk).content
[<wagtail.core.blocks.stream_block.StreamValue.StreamChild object at 0x7fba2f43fb10>]
|
KeyError
|
def to_representation(self, page):
if page.specific_class is None:
return None
name = page.specific_class._meta.app_label + "." + page.specific_class.__name__
self.context["view"].seen_types[name] = page.specific_class
return name
|
def to_representation(self, page):
name = page.specific_class._meta.app_label + "." + page.specific_class.__name__
self.context["view"].seen_types[name] = page.specific_class
return name
|
https://github.com/wagtail/wagtail/issues/4592
|
Internal Server Error: /admin/api/v2beta/pages/
Traceback (most recent call last):
File "/home/vagrant/.virtualenvs/bakerydemo/lib/python3.4/site-packages/django/core/handlers/exception.py", line 35, in inner
response = get_response(request)
File "/home/vagrant/.virtualenvs/bakerydemo/lib/python3.4/site-packages/django/core/handlers/base.py", line 128, in _get_response
response = self.process_exception_by_middleware(e, request)
File "/home/vagrant/.virtualenvs/bakerydemo/lib/python3.4/site-packages/django/core/handlers/base.py", line 126, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/home/vagrant/.virtualenvs/bakerydemo/lib/python3.4/site-packages/django/views/decorators/cache.py", line 31, in _cache_controlled
response = viewfunc(request, *args, **kw)
File "/vagrant/wagtail/wagtail/admin/urls/__init__.py", line 102, in wrapper
return view_func(request, *args, **kwargs)
File "/vagrant/wagtail/wagtail/admin/decorators.py", line 34, in decorated_view
return view_func(request, *args, **kwargs)
File "/vagrant/wagtail/wagtail/api/v2/router.py", line 63, in wrapped
return func(request, *args, **kwargs)
File "/home/vagrant/.virtualenvs/bakerydemo/lib/python3.4/site-packages/django/views/decorators/csrf.py", line 54, in wrapped_view
return view_func(*args, **kwargs)
File "/home/vagrant/.virtualenvs/bakerydemo/lib/python3.4/site-packages/rest_framework/viewsets.py", line 95, in view
return self.dispatch(request, *args, **kwargs)
File "/home/vagrant/.virtualenvs/bakerydemo/lib/python3.4/site-packages/rest_framework/views.py", line 494, in dispatch
response = self.handle_exception(exc)
File "/vagrant/wagtail/wagtail/api/v2/endpoints.py", line 116, in handle_exception
return super().handle_exception(exc)
File "/home/vagrant/.virtualenvs/bakerydemo/lib/python3.4/site-packages/rest_framework/views.py", line 454, in handle_exception
self.raise_uncaught_exception(exc)
File "/home/vagrant/.virtualenvs/bakerydemo/lib/python3.4/site-packages/rest_framework/views.py", line 491, in dispatch
response = handler(request, *args, **kwargs)
File "/vagrant/wagtail/wagtail/admin/api/endpoints.py", line 94, in listing_view
response = super().listing_view(request)
File "/vagrant/wagtail/wagtail/api/v2/endpoints.py", line 74, in listing_view
return self.get_paginated_response(serializer.data)
File "/home/vagrant/.virtualenvs/bakerydemo/lib/python3.4/site-packages/rest_framework/serializers.py", line 742, in data
ret = super(ListSerializer, self).data
File "/home/vagrant/.virtualenvs/bakerydemo/lib/python3.4/site-packages/rest_framework/serializers.py", line 262, in data
self._data = self.to_representation(self.instance)
File "/home/vagrant/.virtualenvs/bakerydemo/lib/python3.4/site-packages/rest_framework/serializers.py", line 660, in to_representation
self.child.to_representation(item) for item in iterable
File "/home/vagrant/.virtualenvs/bakerydemo/lib/python3.4/site-packages/rest_framework/serializers.py", line 660, in <listcomp>
self.child.to_representation(item) for item in iterable
File "/vagrant/wagtail/wagtail/api/v2/serializers.py", line 275, in to_representation
meta[field.field_name] = field.to_representation(attribute)
File "/vagrant/wagtail/wagtail/api/v2/serializers.py", line 82, in to_representation
name = page.specific_class._meta.app_label + '.' + page.specific_class.__name__
AttributeError: 'NoneType' object has no attribute '_meta'
|
AttributeError
|
def get_queryset(self):
request = self.request
# Allow pages to be filtered to a specific type
try:
models = page_models_from_string(request.GET.get("type", "wagtailcore.Page"))
except (LookupError, ValueError):
raise BadRequestError("type doesn't exist")
if not models:
models = [Page]
if len(models) == 1:
queryset = models[0].objects.all()
else:
queryset = Page.objects.all()
# Filter pages by specified models
queryset = filter_page_type(queryset, models)
# Get live pages that are not in a private section
queryset = queryset.public().live()
# Filter by site
if request.site:
queryset = queryset.descendant_of(request.site.root_page, inclusive=True)
else:
# No sites configured
queryset = queryset.none()
return queryset
|
def get_queryset(self):
request = self.request
# Allow pages to be filtered to a specific type
try:
models = page_models_from_string(request.GET.get("type", "wagtailcore.Page"))
except (LookupError, ValueError):
raise BadRequestError("type doesn't exist")
if not models:
models = [Page]
if len(models) == 1:
queryset = models[0].objects.all()
else:
queryset = Page.objects.all()
# Filter pages by specified models
queryset = filter_page_type(queryset, models)
# Get live pages that are not in a private section
queryset = queryset.public().live()
# Filter by site
queryset = queryset.descendant_of(request.site.root_page, inclusive=True)
return queryset
|
https://github.com/wagtail/wagtail/issues/3967
|
Internal Server Error: /cms/api/v2beta/pages/
Traceback (most recent call last):
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/django/core/handlers/exception.py", line 41, in inner
response = get_response(request)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/django/core/handlers/base.py", line 249, in _legacy_get_response
response = self._get_response(request)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/django/core/handlers/base.py", line 187, in _get_response
response = self.process_exception_by_middleware(e, request)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/django/core/handlers/base.py", line 185, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/django/views/decorators/cache.py", line 43, in _cache_controlled
response = viewfunc(request, *args, **kw)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/wagtailadmin/urls/__init__.py", line 96, in wrapper
return view_func(request, *args, **kwargs)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/wagtailadmin/decorators.py", line 31, in decorated_view
return view_func(request, *args, **kwargs)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/router.py", line 65, in wrapped
return func(request, *args, **kwargs)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/django/views/decorators/csrf.py", line 58, in wrapped_view
return view_func(*args, **kwargs)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/viewsets.py", line 90, in view
return self.dispatch(request, *args, **kwargs)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/views.py", line 489, in dispatch
response = self.handle_exception(exc)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/endpoints.py", line 95, in handle_exception
return super(BaseAPIEndpoint, self).handle_exception(exc)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/views.py", line 449, in handle_exception
self.raise_uncaught_exception(exc)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/views.py", line 486, in dispatch
response = handler(request, *args, **kwargs)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/wagtailadmin/api/endpoints.py", line 93, in listing_view
response = super(PagesAdminAPIEndpoint, self).listing_view(request)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/endpoints.py", line 81, in listing_view
return self.get_paginated_response(serializer.data)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/serializers.py", line 739, in data
ret = super(ListSerializer, self).data
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/serializers.py", line 263, in data
self._data = self.to_representation(self.instance)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/serializers.py", line 657, in to_representation
self.child.to_representation(item) for item in iterable
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/serializers.py", line 657, in <listcomp>
self.child.to_representation(item) for item in iterable
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/serializers.py", line 275, in to_representation
attribute = field.get_attribute(instance)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/serializers.py", line 47, in get_attribute
url = get_object_detail_url(self.context, type(instance), instance.pk)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/serializers.py", line 20, in get_object_detail_url
return get_full_url(context['request'], url_path)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/utils.py", line 25, in get_full_url
base_url = get_base_url(request) or ''
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/utils.py", line 15, in get_base_url
base_url = getattr(settings, 'WAGTAILAPI_BASE_URL', request.site.root_url if request else None)
AttributeError: 'NoneType' object has no attribute 'root_url'
|
AttributeError
|
def get_base_url(request=None):
base_url = getattr(
settings,
"WAGTAILAPI_BASE_URL",
request.site.root_url if request and request.site else None,
)
if base_url:
# We only want the scheme and netloc
base_url_parsed = urlparse(base_url)
return base_url_parsed.scheme + "://" + base_url_parsed.netloc
|
def get_base_url(request=None):
base_url = getattr(
settings, "WAGTAILAPI_BASE_URL", request.site.root_url if request else None
)
if base_url:
# We only want the scheme and netloc
base_url_parsed = urlparse(base_url)
return base_url_parsed.scheme + "://" + base_url_parsed.netloc
|
https://github.com/wagtail/wagtail/issues/3967
|
Internal Server Error: /cms/api/v2beta/pages/
Traceback (most recent call last):
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/django/core/handlers/exception.py", line 41, in inner
response = get_response(request)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/django/core/handlers/base.py", line 249, in _legacy_get_response
response = self._get_response(request)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/django/core/handlers/base.py", line 187, in _get_response
response = self.process_exception_by_middleware(e, request)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/django/core/handlers/base.py", line 185, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/django/views/decorators/cache.py", line 43, in _cache_controlled
response = viewfunc(request, *args, **kw)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/wagtailadmin/urls/__init__.py", line 96, in wrapper
return view_func(request, *args, **kwargs)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/wagtailadmin/decorators.py", line 31, in decorated_view
return view_func(request, *args, **kwargs)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/router.py", line 65, in wrapped
return func(request, *args, **kwargs)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/django/views/decorators/csrf.py", line 58, in wrapped_view
return view_func(*args, **kwargs)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/viewsets.py", line 90, in view
return self.dispatch(request, *args, **kwargs)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/views.py", line 489, in dispatch
response = self.handle_exception(exc)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/endpoints.py", line 95, in handle_exception
return super(BaseAPIEndpoint, self).handle_exception(exc)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/views.py", line 449, in handle_exception
self.raise_uncaught_exception(exc)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/views.py", line 486, in dispatch
response = handler(request, *args, **kwargs)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/wagtailadmin/api/endpoints.py", line 93, in listing_view
response = super(PagesAdminAPIEndpoint, self).listing_view(request)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/endpoints.py", line 81, in listing_view
return self.get_paginated_response(serializer.data)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/serializers.py", line 739, in data
ret = super(ListSerializer, self).data
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/serializers.py", line 263, in data
self._data = self.to_representation(self.instance)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/serializers.py", line 657, in to_representation
self.child.to_representation(item) for item in iterable
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/rest_framework/serializers.py", line 657, in <listcomp>
self.child.to_representation(item) for item in iterable
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/serializers.py", line 275, in to_representation
attribute = field.get_attribute(instance)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/serializers.py", line 47, in get_attribute
url = get_object_detail_url(self.context, type(instance), instance.pk)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/serializers.py", line 20, in get_object_detail_url
return get_full_url(context['request'], url_path)
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/utils.py", line 25, in get_full_url
base_url = get_base_url(request) or ''
File "/Users/thomas/virtualenvs/djangoenv/lib/python3.5/site-packages/wagtail/api/v2/utils.py", line 15, in get_base_url
base_url = getattr(settings, 'WAGTAILAPI_BASE_URL', request.site.root_url if request else None)
AttributeError: 'NoneType' object has no attribute 'root_url'
|
AttributeError
|
def dummy_request(self, original_request=None, **meta):
"""
Construct a HttpRequest object that is, as far as possible, representative of ones that would
receive this page as a response. Used for previewing / moderation and any other place where we
want to display a view of this page in the admin interface without going through the regular
page routing logic.
If you pass in a real request object as original_request, additional information (e.g. client IP, cookies)
will be included in the dummy request.
"""
url = self.full_url
if url:
url_info = urlparse(url)
hostname = url_info.hostname
path = url_info.path
port = url_info.port or 80
scheme = url_info.scheme
else:
# Cannot determine a URL to this page - cobble one together based on
# whatever we find in ALLOWED_HOSTS
try:
hostname = settings.ALLOWED_HOSTS[0]
if hostname == "*":
# '*' is a valid value to find in ALLOWED_HOSTS[0], but it's not a valid domain name.
# So we pretend it isn't there.
raise IndexError
except IndexError:
hostname = "localhost"
path = "/"
port = 80
scheme = "http"
dummy_values = {
"REQUEST_METHOD": "GET",
"PATH_INFO": path,
"SERVER_NAME": hostname,
"SERVER_PORT": port,
"SERVER_PROTOCOL": "HTTP/1.1",
"HTTP_HOST": hostname,
"wsgi.version": (1, 0),
"wsgi.input": StringIO(),
"wsgi.errors": StringIO(),
"wsgi.url_scheme": scheme,
"wsgi.multithread": True,
"wsgi.multiprocess": True,
"wsgi.run_once": False,
}
# Add important values from the original request object, if it was provided.
HEADERS_FROM_ORIGINAL_REQUEST = [
"REMOTE_ADDR",
"HTTP_X_FORWARDED_FOR",
"HTTP_COOKIE",
"HTTP_USER_AGENT",
"wsgi.version",
"wsgi.multithread",
"wsgi.multiprocess",
"wsgi.run_once",
]
if original_request:
for header in HEADERS_FROM_ORIGINAL_REQUEST:
if header in original_request.META:
dummy_values[header] = original_request.META[header]
# Add additional custom metadata sent by the caller.
dummy_values.update(**meta)
request = WSGIRequest(dummy_values)
# Apply middleware to the request
# Note that Django makes sure only one of the middleware settings are
# used in a project
if hasattr(settings, "MIDDLEWARE"):
handler = BaseHandler()
handler.load_middleware()
handler._middleware_chain(request)
elif hasattr(settings, "MIDDLEWARE_CLASSES"):
# Pre Django 1.10 style - see http://www.mellowmorning.com/2011/04/18/mock-django-request-for-testing/
handler = BaseHandler()
handler.load_middleware()
# call each middleware in turn and throw away any responses that they might return
for middleware_method in handler._request_middleware:
middleware_method(request)
return request
|
def dummy_request(self, original_request=None, **meta):
"""
Construct a HttpRequest object that is, as far as possible, representative of ones that would
receive this page as a response. Used for previewing / moderation and any other place where we
want to display a view of this page in the admin interface without going through the regular
page routing logic.
If you pass in a real request object as original_request, additional information (e.g. client IP, cookies)
will be included in the dummy request.
"""
url = self.full_url
if url:
url_info = urlparse(url)
hostname = url_info.hostname
path = url_info.path
port = url_info.port or 80
else:
# Cannot determine a URL to this page - cobble one together based on
# whatever we find in ALLOWED_HOSTS
try:
hostname = settings.ALLOWED_HOSTS[0]
if hostname == "*":
# '*' is a valid value to find in ALLOWED_HOSTS[0], but it's not a valid domain name.
# So we pretend it isn't there.
raise IndexError
except IndexError:
hostname = "localhost"
path = "/"
port = 80
dummy_values = {
"REQUEST_METHOD": "GET",
"PATH_INFO": path,
"SERVER_NAME": hostname,
"SERVER_PORT": port,
"HTTP_HOST": hostname,
"wsgi.input": StringIO(),
}
# Add important values from the original request object, if it was provided.
if original_request:
if original_request.META.get("REMOTE_ADDR"):
dummy_values["REMOTE_ADDR"] = original_request.META["REMOTE_ADDR"]
if original_request.META.get("HTTP_X_FORWARDED_FOR"):
dummy_values["HTTP_X_FORWARDED_FOR"] = original_request.META[
"HTTP_X_FORWARDED_FOR"
]
if original_request.META.get("HTTP_COOKIE"):
dummy_values["HTTP_COOKIE"] = original_request.META["HTTP_COOKIE"]
if original_request.META.get("HTTP_USER_AGENT"):
dummy_values["HTTP_USER_AGENT"] = original_request.META["HTTP_USER_AGENT"]
# Add additional custom metadata sent by the caller.
dummy_values.update(**meta)
request = WSGIRequest(dummy_values)
# Apply middleware to the request
# Note that Django makes sure only one of the middleware settings are
# used in a project
if hasattr(settings, "MIDDLEWARE"):
handler = BaseHandler()
handler.load_middleware()
handler._middleware_chain(request)
elif hasattr(settings, "MIDDLEWARE_CLASSES"):
# Pre Django 1.10 style - see http://www.mellowmorning.com/2011/04/18/mock-django-request-for-testing/
handler = BaseHandler()
handler.load_middleware()
# call each middleware in turn and throw away any responses that they might return
for middleware_method in handler._request_middleware:
middleware_method(request)
return request
|
https://github.com/wagtail/wagtail/issues/2989
|
[ERROR] [django.request] [exception/handle_uncaught_exception] Internal Server Error: /cmspnl/pages/53/edit/preview/
Traceback (most recent call last):
File "virtualenv\lib\site-packages\django\core\handlers\base.py", line 131, in get_response
response = middleware_method(request, response)
File "virtualenv\lib\site-packages\debug_toolbar\middleware.py", line 122, in process_response
bits[-2] += toolbar.render_toolbar()
File "virtualenv\lib\site-packages\debug_toolbar\toolbar.py", line 60, in render_toolbar
if not self.should_render_panels():
File "virtualenv\lib\site-packages\debug_toolbar\toolbar.py", line 81, in should_render_panels
render_panels = self.request.META['wsgi.multiprocess']
KeyError: 'wsgi.multiprocess'
|
KeyError
|
def get_searchable_content(self, value):
# Return the display value as the searchable value
text_value = force_text(value)
for k, v in self.field.choices:
if isinstance(v, (list, tuple)):
# This is an optgroup, so look inside the group for options
for k2, v2 in v:
if value == k2 or text_value == force_text(k2):
return [force_text(k), force_text(v2)]
else:
if value == k or text_value == force_text(k):
return [force_text(v)]
return [] # Value was not found in the list of choices
|
def get_searchable_content(self, value):
# Return the display value as the searchable value
text_value = force_text(value)
for k, v in self.field.choices:
if isinstance(v, (list, tuple)):
# This is an optgroup, so look inside the group for options
for k2, v2 in v:
if value == k2 or text_value == force_text(k2):
return [k, v2]
else:
if value == k or text_value == force_text(k):
return [v]
return [] # Value was not found in the list of choices
|
https://github.com/wagtail/wagtail/issues/2928
|
→ python manage.py update_index
Updating backend: default
default: Rebuilding index wagtail_serialization
default: home.HomePage Traceback (most recent call last):
File "/Users/moritz/python/buchbasel/lib/python3.4/site-packages/elasticsearch/serializer.py", line 45, in dumps
return json.dumps(data, default=self.default)
File "/opt/local/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/json/__init__.py", line 237, in dumps
**kw).encode(obj)
File "/opt/local/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/json/encoder.py", line 192, in encode
chunks = self.iterencode(o, _one_shot=True)
File "/opt/local/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/json/encoder.py", line 250, in iterencode
return _iterencode(o, 0)
File "/Users/moritz/python/buchbasel/lib/python3.4/site-packages/elasticsearch/serializer.py", line 31, in default
raise TypeError("Unable to serialize %r (type: %s)" % (data, type(data)))
TypeError: Unable to serialize <django.utils.functional.lazy.<locals>.__proxy__ object at 0x111889fd0> (type: <class 'django.utils.functional.lazy.<locals>.__proxy__'>)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "manage.py", line 12, in <module>
execute_from_command_line(sys.argv)
File "/Users/moritz/python/buchbasel/lib/python3.4/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line
utility.execute()
File "/Users/moritz/python/buchbasel/lib/python3.4/site-packages/django/core/management/__init__.py", line 346, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/Users/moritz/python/buchbasel/lib/python3.4/site-packages/django/core/management/base.py", line 394, in run_from_argv
self.execute(*args, **cmd_options)
File "/Users/moritz/python/buchbasel/lib/python3.4/site-packages/django/core/management/base.py", line 445, in execute
output = self.handle(*args, **options)
File "/Users/moritz/python/buchbasel/lib/python3.4/site-packages/wagtail/wagtailsearch/management/commands/update_index.py", line 120, in handle
self.update_backend(backend_name, schema_only=options['schema_only'])
File "/Users/moritz/python/buchbasel/lib/python3.4/site-packages/wagtail/wagtailsearch/management/commands/update_index.py", line 87, in update_backend
index.add_items(model, chunk)
File "/Users/moritz/python/buchbasel/lib/python3.4/site-packages/wagtail/wagtailsearch/backends/elasticsearch.py", line 529, in add_items
bulk(self.es, actions)
File "/Users/moritz/python/buchbasel/lib/python3.4/site-packages/elasticsearch/helpers/__init__.py", line 188, in bulk
for ok, item in streaming_bulk(client, actions, **kwargs):
File "/Users/moritz/python/buchbasel/lib/python3.4/site-packages/elasticsearch/helpers/__init__.py", line 159, in streaming_bulk
for bulk_actions in _chunk_actions(actions, chunk_size, max_chunk_bytes, client.transport.serializer):
File "/Users/moritz/python/buchbasel/lib/python3.4/site-packages/elasticsearch/helpers/__init__.py", line 58, in _chunk_actions
data = serializer.dumps(data)
File "/Users/moritz/python/buchbasel/lib/python3.4/site-packages/elasticsearch/serializer.py", line 47, in dumps
raise SerializationError(data, e)
elasticsearch.exceptions.SerializationError: ({'title': 'Homepage', 'path_filter': '00010001', 'live_filter': True, 'body': [<django.utils.functional.lazy.<locals>.__proxy__ object at 0x111889fd0>], 'content_type_id_filter': 2, 'content_type': 'wagtailcore_page_home_homepage', 'id_filter': 3, 'show_in_menus_filter': False, '_partials': ['Homepage'], 'depth_filter': 2, 'first_published_at_filter': datetime.datetime(2016, 8, 19, 6, 19, 24, 47945, tzinfo=<UTC>), 'owner_id_filter': None, 'locked_filter': False, 'pk': '3', 'latest_revision_created_at_filter': datetime.datetime(2016, 8, 19, 6, 19, 23, 999422, tzinfo=<UTC>)}, TypeError("Unable to serialize <django.utils.functional.lazy.<locals>.__proxy__ object at 0x111889fd0> (type: <class 'django.utils.functional.lazy.<locals>.__proxy__'>)",))
|
TypeError
|
def preview_on_create(
request, content_type_app_name, content_type_model_name, parent_page_id
):
# Receive the form submission that would typically be posted to the 'create' view. If submission is valid,
# return the rendered page; if not, re-render the edit form
try:
content_type = ContentType.objects.get_by_natural_key(
content_type_app_name, content_type_model_name
)
except ContentType.DoesNotExist:
raise Http404
page_class = content_type.model_class()
page = page_class()
edit_handler_class = page_class.get_edit_handler()
form_class = edit_handler_class.get_form_class(page_class)
parent_page = get_object_or_404(Page, id=parent_page_id).specific
form = form_class(
request.POST, request.FILES, instance=page, parent_page=parent_page
)
if form.is_valid():
form.save(commit=False)
# We need to populate treebeard's path / depth fields in order to pass validation.
# We can't make these 100% consistent with the rest of the tree without making actual
# database changes (such as incrementing the parent's numchild field), but by
# calling treebeard's internal _get_path method, we can set a 'realistic' value that
# will hopefully enable tree traversal operations to at least partially work.
page.depth = parent_page.depth + 1
if parent_page.is_leaf():
# set the path as the first child of parent_page
page.path = page._get_path(parent_page.path, page.depth, 1)
else:
# add the new page after the last child of parent_page
page.path = parent_page.get_last_child()._inc_path()
# ensure that our unsaved page instance has a suitable url set
page.set_url_path(parent_page)
page.full_clean()
# Set treebeard attributes
page.depth = parent_page.depth + 1
page.path = Page._get_children_path_interval(parent_page.path)[1]
preview_mode = request.GET.get("mode", page.default_preview_mode)
response = page.serve_preview(page.dummy_request(), preview_mode)
response["X-Wagtail-Preview"] = "ok"
return response
else:
edit_handler = edit_handler_class(instance=page, form=form)
response = render(
request,
"wagtailadmin/pages/create.html",
{
"content_type": content_type,
"page_class": page_class,
"parent_page": parent_page,
"edit_handler": edit_handler,
"preview_modes": page.preview_modes,
"form": form,
},
)
response["X-Wagtail-Preview"] = "error"
return response
|
def preview_on_create(
request, content_type_app_name, content_type_model_name, parent_page_id
):
# Receive the form submission that would typically be posted to the 'create' view. If submission is valid,
# return the rendered page; if not, re-render the edit form
try:
content_type = ContentType.objects.get_by_natural_key(
content_type_app_name, content_type_model_name
)
except ContentType.DoesNotExist:
raise Http404
page_class = content_type.model_class()
page = page_class()
edit_handler_class = page_class.get_edit_handler()
form_class = edit_handler_class.get_form_class(page_class)
parent_page = get_object_or_404(Page, id=parent_page_id).specific
form = form_class(
request.POST, request.FILES, instance=page, parent_page=parent_page
)
if form.is_valid():
form.save(commit=False)
# We need to populate treebeard's path / depth fields in order to pass validation.
# We can't make these 100% consistent with the rest of the tree without making actual
# database changes (such as incrementing the parent's numchild field), but by
# calling treebeard's internal _get_path method, we can set a 'realistic' value that
# will hopefully enable tree traversal operations to at least partially work.
page.depth = parent_page.depth + 1
page.path = page._get_path(
parent_page.path, page.depth, parent_page.numchild + 1
)
# ensure that our unsaved page instance has a suitable url set
page.set_url_path(parent_page)
page.full_clean()
# Set treebeard attributes
page.depth = parent_page.depth + 1
page.path = Page._get_children_path_interval(parent_page.path)[1]
preview_mode = request.GET.get("mode", page.default_preview_mode)
response = page.serve_preview(page.dummy_request(), preview_mode)
response["X-Wagtail-Preview"] = "ok"
return response
else:
edit_handler = edit_handler_class(instance=page, form=form)
response = render(
request,
"wagtailadmin/pages/create.html",
{
"content_type": content_type,
"page_class": page_class,
"parent_page": parent_page,
"edit_handler": edit_handler,
"preview_modes": page.preview_modes,
"form": form,
},
)
response["X-Wagtail-Preview"] = "error"
return response
|
https://github.com/wagtail/wagtail/issues/2599
|
Internal Server Error: /admin/pages/add/core/articlepage/24/preview/
Traceback (most recent call last):
File "/var/www/inner.kiwi/.pyenv/versions/inner.kiwi/lib/python3.5/site-packages/django/core/handlers/base.py", line 132, in get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/var/www/inner.kiwi/.pyenv/versions/inner.kiwi/lib/python3.5/site-packages/django/views/decorators/cache.py", line 43, in _cache_controlled
response = viewfunc(request, *args, **kw)
File "/var/www/inner.kiwi/.pyenv/versions/inner.kiwi/lib/python3.5/site-packages/wagtail/wagtailadmin/decorators.py", line 22, in decorated_view
return view_func(request, *args, **kwargs)
File "/var/www/inner.kiwi/.pyenv/versions/inner.kiwi/lib/python3.5/site-packages/wagtail/wagtailadmin/views/pages.py", line 530, in preview_on_create
page.full_clean()
File "/var/www/inner.kiwi/.pyenv/versions/inner.kiwi/lib/python3.5/site-packages/wagtail/wagtailcore/models.py", line 436, in full_clean
super(Page, self).full_clean(*args, **kwargs)
File "/var/www/inner.kiwi/.pyenv/versions/inner.kiwi/lib/python3.5/site-packages/django/db/models/base.py", line 1171, in full_clean
raise ValidationError(errors)
django.core.exceptions.ValidationError: {'path': ['Page with this Path already exists.']}
|
django.core.exceptions.ValidationError
|
def get_form_class(cls, model):
"""
Construct a form class that has all the fields and formsets named in
the children of this edit handler.
"""
if cls._form_class is None:
# If a custom form class was passed to the EditHandler, use it.
# Otherwise, use the base_form_class from the model.
# If that is not defined, use WagtailAdminModelForm.
model_form_class = getattr(model, "base_form_class", WagtailAdminModelForm)
base_form_class = cls.base_form_class or model_form_class
cls._form_class = get_form_for_model(
model,
form_class=base_form_class,
fields=cls.required_fields(),
formsets=cls.required_formsets(),
widgets=cls.widget_overrides(),
)
return cls._form_class
|
def get_form_class(cls, model):
"""
Construct a form class that has all the fields and formsets named in
the children of this edit handler.
"""
if cls._form_class is None:
cls._form_class = get_form_for_model(
model,
form_class=cls.base_form_class,
fields=cls.required_fields(),
formsets=cls.required_formsets(),
widgets=cls.widget_overrides(),
)
return cls._form_class
|
https://github.com/wagtail/wagtail/issues/2267
|
Traceback (most recent call last):
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/core/handlers/base.py", line 149, in get_response
response = self.process_exception_by_middleware(e, request)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/core/handlers/base.py", line 147, in get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/views/decorators/cache.py", line 43, in _cache_controlled
response = viewfunc(request, *args, **kw)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/contrib/auth/decorators.py", line 23, in _wrapped_view
return view_func(request, *args, **kwargs)
File "/Users/matt/Dev/MyProject/MyApp/MyApp/wagtail/wagtailadmin/views/pages.py", line 169, in create
parent_page=parent_page)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/modelcluster/forms.py", line 208, in __init__
super(ClusterForm, self).__init__(data, files, instance=instance, prefix=prefix, **kwargs)
TypeError: __init__() got an unexpected keyword argument 'parent_page'
|
TypeError
|
def __init__(self, children, base_form_class=None):
self.children = children
self.base_form_class = base_form_class
|
def __init__(self, children, base_form_class=BaseFormEditHandler.base_form_class):
self.children = children
self.base_form_class = base_form_class
|
https://github.com/wagtail/wagtail/issues/2267
|
Traceback (most recent call last):
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/core/handlers/base.py", line 149, in get_response
response = self.process_exception_by_middleware(e, request)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/core/handlers/base.py", line 147, in get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/views/decorators/cache.py", line 43, in _cache_controlled
response = viewfunc(request, *args, **kw)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/contrib/auth/decorators.py", line 23, in _wrapped_view
return view_func(request, *args, **kwargs)
File "/Users/matt/Dev/MyProject/MyApp/MyApp/wagtail/wagtailadmin/views/pages.py", line 169, in create
parent_page=parent_page)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/modelcluster/forms.py", line 208, in __init__
super(ClusterForm, self).__init__(data, files, instance=instance, prefix=prefix, **kwargs)
TypeError: __init__() got an unexpected keyword argument 'parent_page'
|
TypeError
|
def __init__(self, children, heading="", classname="", base_form_class=None):
self.children = children
self.heading = heading
self.classname = classname
self.base_form_class = base_form_class
|
def __init__(
self,
children,
heading="",
classname="",
base_form_class=BaseFormEditHandler.base_form_class,
):
self.children = children
self.heading = heading
self.classname = classname
self.base_form_class = base_form_class
|
https://github.com/wagtail/wagtail/issues/2267
|
Traceback (most recent call last):
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/core/handlers/base.py", line 149, in get_response
response = self.process_exception_by_middleware(e, request)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/core/handlers/base.py", line 147, in get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/views/decorators/cache.py", line 43, in _cache_controlled
response = viewfunc(request, *args, **kw)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/contrib/auth/decorators.py", line 23, in _wrapped_view
return view_func(request, *args, **kwargs)
File "/Users/matt/Dev/MyProject/MyApp/MyApp/wagtail/wagtailadmin/views/pages.py", line 169, in create
parent_page=parent_page)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/modelcluster/forms.py", line 208, in __init__
super(ClusterForm, self).__init__(data, files, instance=instance, prefix=prefix, **kwargs)
TypeError: __init__() got an unexpected keyword argument 'parent_page'
|
TypeError
|
def check(cls, **kwargs):
errors = super(Page, cls).check(**kwargs)
# Check that foreign keys from pages are not configured to cascade
# This is the default Django behaviour which must be explicitly overridden
# to prevent pages disappearing unexpectedly and the tree being corrupted
# get names of foreign keys pointing to parent classes (such as page_ptr)
field_exceptions = [
field.name
for model in [cls] + list(cls._meta.get_parent_list())
for field in model._meta.parents.values()
if field
]
for field in cls._meta.fields:
if isinstance(field, models.ForeignKey) and field.name not in field_exceptions:
if field.rel.on_delete == models.CASCADE:
errors.append(
checks.Warning(
"Field hasn't specified on_delete action",
hint="Set on_delete=models.SET_NULL and make sure the field is nullable.",
obj=field,
id="wagtailcore.W001",
)
)
if not isinstance(cls.objects, PageManager):
errors.append(
checks.Error(
"Manager does not inherit from PageManager",
hint="Ensure that custom Page managers inherit from {}.{}".format(
PageManager.__module__, PageManager.__name__
),
obj=cls,
id="wagtailcore.E002",
)
)
try:
cls.clean_subpage_models()
except (ValueError, LookupError) as e:
errors.append(
checks.Error(
"Invalid subpage_types setting for %s" % cls,
hint=str(e),
id="wagtailcore.E002",
)
)
try:
cls.clean_parent_page_models()
except (ValueError, LookupError) as e:
errors.append(
checks.Error(
"Invalid parent_page_types setting for %s" % cls,
hint=str(e),
id="wagtailcore.E002",
)
)
from wagtail.wagtailadmin.forms import WagtailAdminPageForm
if not issubclass(cls.base_form_class, WagtailAdminPageForm):
errors.append(
checks.Error(
"{}.base_form_class does not extend WagtailAdminPageForm".format(
cls.__name__
),
hint="Ensure that {}.{} extends WagtailAdminPageForm".format(
cls.base_form_class.__module__, cls.base_form_class.__name__
),
obj=cls,
id="wagtailcore.E002",
)
)
edit_handler = cls.get_edit_handler()
if not issubclass(edit_handler.get_form_class(cls), WagtailAdminPageForm):
errors.append(
checks.Error(
"{cls}.get_edit_handler().get_form_class({cls}) does not extend WagtailAdminPageForm".format(
cls=cls.__name__
),
hint="Ensure that the EditHandler for {cls} creates a subclass of WagtailAdminPageForm".format(
cls=cls.__name__
),
obj=cls,
id="wagtailcore.E003",
)
)
return errors
|
def check(cls, **kwargs):
errors = super(Page, cls).check(**kwargs)
# Check that foreign keys from pages are not configured to cascade
# This is the default Django behaviour which must be explicitly overridden
# to prevent pages disappearing unexpectedly and the tree being corrupted
# get names of foreign keys pointing to parent classes (such as page_ptr)
field_exceptions = [
field.name
for model in [cls] + list(cls._meta.get_parent_list())
for field in model._meta.parents.values()
if field
]
for field in cls._meta.fields:
if isinstance(field, models.ForeignKey) and field.name not in field_exceptions:
if field.rel.on_delete == models.CASCADE:
errors.append(
checks.Warning(
"Field hasn't specified on_delete action",
hint="Set on_delete=models.SET_NULL and make sure the field is nullable.",
obj=field,
id="wagtailcore.W001",
)
)
if not isinstance(cls.objects, PageManager):
errors.append(
checks.Error(
"Manager does not inherit from PageManager",
hint="Ensure that custom Page managers inherit from {}.{}".format(
PageManager.__module__, PageManager.__name__
),
obj=cls,
id="wagtailcore.E002",
)
)
try:
cls.clean_subpage_models()
except (ValueError, LookupError) as e:
errors.append(
checks.Error(
"Invalid subpage_types setting for %s" % cls,
hint=str(e),
id="wagtailcore.E002",
)
)
try:
cls.clean_parent_page_models()
except (ValueError, LookupError) as e:
errors.append(
checks.Error(
"Invalid parent_page_types setting for %s" % cls,
hint=str(e),
id="wagtailcore.E002",
)
)
from wagtail.wagtailadmin.forms import WagtailAdminPageForm
if not issubclass(cls.base_form_class, WagtailAdminPageForm):
errors.append(
checks.Error(
"base_form_class does not extend WagtailAdminPageForm",
hint="Ensure that {}.{} extends WagtailAdminPageForm".format(
cls.base_form_class.__module__, cls.base_form_class.__name__
),
obj=cls,
id="wagtailcore.E002",
)
)
# Sadly, there is no way of checking the form class returned from
# cls.get_edit_handler().get_form_class(cls), as these calls can hit
# the DB in order to fetch content types.
return errors
|
https://github.com/wagtail/wagtail/issues/2267
|
Traceback (most recent call last):
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/core/handlers/base.py", line 149, in get_response
response = self.process_exception_by_middleware(e, request)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/core/handlers/base.py", line 147, in get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/views/decorators/cache.py", line 43, in _cache_controlled
response = viewfunc(request, *args, **kw)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/django/contrib/auth/decorators.py", line 23, in _wrapped_view
return view_func(request, *args, **kwargs)
File "/Users/matt/Dev/MyProject/MyApp/MyApp/wagtail/wagtailadmin/views/pages.py", line 169, in create
parent_page=parent_page)
File "/Users/matt/Dev/MyProject/MyApp/lib/python3.4/site-packages/modelcluster/forms.py", line 208, in __init__
super(ClusterForm, self).__init__(data, files, instance=instance, prefix=prefix, **kwargs)
TypeError: __init__() got an unexpected keyword argument 'parent_page'
|
TypeError
|
def process_response(self, request, response):
# No need to check for a redirect for non-404 responses.
if response.status_code != 404:
return response
# If a middleware before `SiteMiddleware` returned a response the
# `site` attribute was never set, ref #2120
if not hasattr(request, "site"):
return response
# Get the path
path = models.Redirect.normalise_path(request.get_full_path())
# Get the path without the query string or params
path_without_query = urlparse(path).path
# Find redirect
try:
redirect = models.Redirect.get_for_site(request.site).get(old_path=path)
except models.Redirect.DoesNotExist:
if path == path_without_query:
# don't try again if we know we will get the same response
return response
try:
redirect = models.Redirect.get_for_site(request.site).get(
old_path=path_without_query
)
except models.Redirect.DoesNotExist:
return response
if redirect.is_permanent:
return http.HttpResponsePermanentRedirect(redirect.link)
else:
return http.HttpResponseRedirect(redirect.link)
|
def process_response(self, request, response):
# No need to check for a redirect for non-404 responses.
if response.status_code != 404:
return response
# Get the path
path = models.Redirect.normalise_path(request.get_full_path())
# Get the path without the query string or params
path_without_query = urlparse(path)[2]
# Find redirect
try:
redirect = models.Redirect.get_for_site(request.site).get(old_path=path)
except models.Redirect.DoesNotExist:
if path == path_without_query:
# don't try again if we know we will get the same response
return response
try:
redirect = models.Redirect.get_for_site(request.site).get(
old_path=path_without_query
)
except models.Redirect.DoesNotExist:
return response
if redirect.is_permanent:
return http.HttpResponsePermanentRedirect(redirect.link)
else:
return http.HttpResponseRedirect(redirect.link)
|
https://github.com/wagtail/wagtail/issues/2120
|
Internal Server Error: /page/foo
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/django/core/handlers/base.py", line 223, in get_response
response = middleware_method(request, response)
File "/usr/local/lib/python2.7/dist-packages/wagtail/wagtailredirects/middleware.py", line 22, in process_response
redirect = models.Redirect.get_for_site(request.site).get(old_path=path)
AttributeError: 'WSGIRequest' object has no attribute 'site'
|
AttributeError
|
def __init__(self, content_type=None, **kwargs):
super(AdminPageChooser, self).__init__(**kwargs)
self._content_type = content_type
|
def __init__(self, content_type=None, **kwargs):
super(AdminPageChooser, self).__init__(**kwargs)
self.target_content_types = content_type or ContentType.objects.get_for_model(Page)
# Make sure target_content_types is a list or tuple
if not isinstance(self.target_content_types, (list, tuple)):
self.target_content_types = [self.target_content_types]
|
https://github.com/wagtail/wagtail/issues/1673
|
Traceback (most recent call last):
File "./manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/django/core/management/__init__.py", line 338, in execute_from_command_line
utility.execute()
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/django/core/management/__init__.py", line 312, in execute
django.setup()
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/django/__init__.py", line 18, in setup
apps.populate(settings.INSTALLED_APPS)
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/django/apps/registry.py", line 115, in populate
app_config.ready()
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/debug_toolbar/apps.py", line 15, in ready
dt_settings.patch_all()
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/debug_toolbar/settings.py", line 232, in patch_all
patch_root_urlconf()
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/debug_toolbar/settings.py", line 220, in patch_root_urlconf
reverse('djdt:render_panel')
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/django/core/urlresolvers.py", line 550, in reverse
app_list = resolver.app_dict[ns]
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/django/core/urlresolvers.py", line 352, in app_dict
self._populate()
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/django/core/urlresolvers.py", line 285, in _populate
for pattern in reversed(self.url_patterns):
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/django/core/urlresolvers.py", line 402, in url_patterns
patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module)
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/django/core/urlresolvers.py", line 396, in urlconf_module
self._urlconf_module = import_module(self.urlconf_name)
File "/usr/local/Cellar/python/2.7.10_2/Frameworks/Python.framework/Versions/2.7/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
File "/Users/mvantellingen/projects/myorg/myproj/src/myproj/urls.py", line 7, in <module>
from wagtail.wagtailadmin import urls as wagtailadmin_urls
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/wagtail/wagtailadmin/urls/__init__.py", line 7, in <module>
from wagtail.wagtailadmin.views import account, chooser, home, pages, tags, userbar
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/wagtail/wagtailadmin/views/account.py", line 12, in <module>
from wagtail.wagtailusers.forms import NotificationPreferencesForm
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/wagtail/wagtailusers/forms.py", line 236, in <module>
class GroupPagePermissionForm(forms.ModelForm):
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/wagtail/wagtailusers/forms.py", line 238, in GroupPagePermissionForm
widget=AdminPageChooser(show_edit_link=False))
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/wagtail/wagtailadmin/widgets.py", line 123, in __init__
self.target_content_types = content_type or ContentType.objects.get_for_model(Page)
File "/usr/local/opt/pyenv/versions/myproj/lib/python2.7/site-packages/django/contrib/contenttypes/models.py", line 78, in get_for_model
"Error creating new content types. Please make sure contenttypes "
RuntimeError: Error creating new content types. Please make sure contenttypes is migrated before trying to migrate apps individually.
|
RuntimeError
|
def get_willow_image(self):
# Open file if it is closed
close_file = False
try:
image_file = self.file
if self.file.closed:
# Reopen the file
if self.is_stored_locally():
self.file.open("rb")
else:
# Some external storage backends don't allow reopening
# the file. Get a fresh file instance. #1397
storage = self._meta.get_field("file").storage
image_file = storage.open(self.file.name, "rb")
close_file = True
except IOError as e:
# re-throw this as a SourceImageIOError so that calling code can distinguish
# these from IOErrors elsewhere in the process
raise SourceImageIOError(text_type(e))
# Seek to beginning
image_file.seek(0)
try:
yield WillowImage.open(image_file)
finally:
if close_file:
image_file.close()
|
def get_willow_image(self):
# Open file if it is closed
close_file = False
try:
if self.file.closed:
self.file.open("rb")
close_file = True
except IOError as e:
# re-throw this as a SourceImageIOError so that calling code can distinguish
# these from IOErrors elsewhere in the process
raise SourceImageIOError(text_type(e))
# Seek to beginning
self.file.seek(0)
try:
yield WillowImage.open(self.file)
finally:
if close_file:
self.file.close()
|
https://github.com/wagtail/wagtail/issues/1397
|
[11/Jun/2015 13:05:29] "POST /cms/images/chooser/upload/ HTTP/1.1" 500 23947
Internal Server Error: /cms/images/chooser/upload/
Traceback (most recent call last):
File "/Users/john/.virtualenvs/website/lib/python2.7/site-packages/django/core/handlers/base.py", line 111, in get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/Users/john/.virtualenvs/website/lib/python2.7/site-packages/django/contrib/auth/decorators.py", line 21, in _wrapped_view
return view_func(request, *args, **kwargs)
File "/Users/john/.virtualenvs/website/lib/python2.7/site-packages/wagtail/wagtailimages/views/chooser.py", line 141, in chooser_upload
{'image_json': get_image_json(image)}
File "/Users/john/.virtualenvs/website/lib/python2.7/site-packages/wagtail/wagtailimages/views/chooser.py", line 22, in get_image_json
preview_image = image.get_rendition('max-130x100')
File "/Users/john/.virtualenvs/website/lib/python2.7/site-packages/wagtail/wagtailimages/models.py", line 196, in get_rendition
generated_image = filter.process_image(image_file, backend_name=backend_name, focal_point=self.get_focal_point())
File "/Users/john/.virtualenvs/website/lib/python2.7/site-packages/wagtail/wagtailimages/models.py", line 371, in process_image
input_file.open('rb')
File "/Users/john/.virtualenvs/website/lib/python2.7/site-packages/django/core/files/base.py", line 128, in open
raise ValueError("The file cannot be reopened.")
ValueError: The file cannot be reopened.
|
ValueError
|
def purge_page_from_cache(page, backend_settings=None, backends=None):
page_url = page.full_url
if page_url is None: # nothing to be done if the page has no routable URL
return
for backend_name, backend in get_backends(
backend_settings=backend_settings, backends=backends
).items():
# Purge cached paths from cache
for path in page.specific.get_cached_paths():
logger.info("[%s] Purging URL: %s", backend_name, page_url + path[1:])
backend.purge(page_url + path[1:])
|
def purge_page_from_cache(page, backend_settings=None, backends=None):
for backend_name, backend in get_backends(
backend_settings=backend_settings, backends=backends
).items():
# Purge cached paths from cache
for path in page.specific.get_cached_paths():
logger.info("[%s] Purging URL: %s", backend_name, page.full_url + path[1:])
backend.purge(page.full_url + path[1:])
|
https://github.com/wagtail/wagtail/issues/1208
|
[17/Apr/2015 20:02:28] ERROR [django.request:231] Internal Server Error: /admin/pages/new/pages/genericpage/1/
Traceback (most recent call last):
File "/Users/jryding/.virtualenvs/cms/lib/python2.7/site-packages/django/core/handlers/base.py", line 111, in get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/Users/jryding/.virtualenvs/cms/lib/python2.7/site-packages/django/contrib/auth/decorators.py", line 21, in _wrapped_view
return view_func(request, *args, **kwargs)
File "/Users/jryding/.virtualenvs/cms/lib/python2.7/site-packages/wagtail/wagtailadmin/views/pages.py", line 211, in create
revision.publish()
File "/Users/jryding/.virtualenvs/cms/lib/python2.7/site-packages/wagtail/wagtailcore/models.py", line 1141, in publish
page_published.send(sender=page.specific_class, instance=page.specific)
File "/Users/jryding/.virtualenvs/cms/lib/python2.7/site-packages/django/dispatch/dispatcher.py", line 198, in send
response = receiver(signal=self, sender=sender, **named)
File "/Users/jryding/.virtualenvs/cms/lib/python2.7/site-packages/wagtail/contrib/wagtailfrontendcache/signal_handlers.py", line 9, in page_published_signal_handler
purge_page_from_cache(instance)
File "/Users/jryding/.virtualenvs/cms/lib/python2.7/site-packages/wagtail/contrib/wagtailfrontendcache/utils.py", line 100, in purge_page_from_cache
logger.info("[%s] Purging URL: %s", backend_name, page.full_url + path[1:])
TypeError: unsupported operand type(s) for +: 'NoneType' and 'str'
|
TypeError
|
def edit(request, image_id):
Image = get_image_model()
ImageForm = get_image_form(Image)
image = get_object_or_404(Image, id=image_id)
if not image.is_editable_by_user(request.user):
raise PermissionDenied
if request.POST:
original_file = image.file
form = ImageForm(request.POST, request.FILES, instance=image)
if form.is_valid():
if "file" in form.changed_data:
# if providing a new image file, delete the old one and all renditions.
# NB Doing this via original_file.delete() clears the file field,
# which definitely isn't what we want...
original_file.storage.delete(original_file.name)
image.renditions.all().delete()
form.save()
# Reindex the image to make sure all tags are indexed
for backend in get_search_backends():
backend.add(image)
messages.success(
request,
_("Image '{0}' updated.").format(image.title),
buttons=[
messages.button(
reverse("wagtailimages_edit_image", args=(image.id,)),
_("Edit again"),
)
],
)
return redirect("wagtailimages_index")
else:
messages.error(request, _("The image could not be saved due to errors."))
else:
form = ImageForm(instance=image)
# Check if we should enable the frontend url generator
try:
reverse("wagtailimages_serve", args=("foo", "1", "bar"))
url_generator_enabled = True
except NoReverseMatch:
url_generator_enabled = False
# Get file size
try:
filesize = image.file.size
except OSError:
# File doesn't exist
filesize = None
return render(
request,
"wagtailimages/images/edit.html",
{
"image": image,
"form": form,
"url_generator_enabled": url_generator_enabled,
"filesize": filesize,
},
)
|
def edit(request, image_id):
Image = get_image_model()
ImageForm = get_image_form(Image)
image = get_object_or_404(Image, id=image_id)
if not image.is_editable_by_user(request.user):
raise PermissionDenied
if request.POST:
original_file = image.file
form = ImageForm(request.POST, request.FILES, instance=image)
if form.is_valid():
if "file" in form.changed_data:
# if providing a new image file, delete the old one and all renditions.
# NB Doing this via original_file.delete() clears the file field,
# which definitely isn't what we want...
original_file.storage.delete(original_file.name)
image.renditions.all().delete()
form.save()
# Reindex the image to make sure all tags are indexed
for backend in get_search_backends():
backend.add(image)
messages.success(
request,
_("Image '{0}' updated.").format(image.title),
buttons=[
messages.button(
reverse("wagtailimages_edit_image", args=(image.id,)),
_("Edit again"),
)
],
)
return redirect("wagtailimages_index")
else:
messages.error(request, _("The image could not be saved due to errors."))
else:
form = ImageForm(instance=image)
# Check if we should enable the frontend url generator
try:
reverse("wagtailimages_serve", args=("foo", "1", "bar"))
url_generator_enabled = True
except NoReverseMatch:
url_generator_enabled = False
return render(
request,
"wagtailimages/images/edit.html",
{
"image": image,
"form": form,
"url_generator_enabled": url_generator_enabled,
},
)
|
https://github.com/wagtail/wagtail/issues/935
|
Traceback (most recent call last):
File "/srv/django/site/env/lib/python2.7/site-packages/django/core/handlers/base.py", line 111, in get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/srv/django/site/env/lib/python2.7/site-packages/django/contrib/auth/decorators.py", line 21, in _wrapped_view
return view_func(request, *args, **kwargs)
File "/srv/django/site/env/lib/python2.7/site-packages/wagtail/wagtailimages/views/images.py", line 126, in edit
'url_generator_enabled': url_generator_enabled,
File "/srv/django/site/env/lib/python2.7/site-packages/django/shortcuts.py", line 50, in render
return HttpResponse(loader.render_to_string(*args, **kwargs),
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/loader.py", line 178, in render_to_string
return t.render(context_instance)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 148, in render
return self._render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 142, in _render
return self.nodelist.render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 844, in render
bit = self.render_node(node, context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 858, in render_node
return node.render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/loader_tags.py", line 126, in render
return compiled_parent._render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 142, in _render
return self.nodelist.render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 844, in render
bit = self.render_node(node, context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 858, in render_node
return node.render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/loader_tags.py", line 126, in render
return compiled_parent._render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 142, in _render
return self.nodelist.render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 844, in render
bit = self.render_node(node, context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 858, in render_node
return node.render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/loader_tags.py", line 126, in render
return compiled_parent._render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 142, in _render
return self.nodelist.render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 844, in render
bit = self.render_node(node, context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 858, in render_node
return node.render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/loader_tags.py", line 65, in render
result = block.nodelist.render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 844, in render
bit = self.render_node(node, context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 858, in render_node
return node.render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/loader_tags.py", line 65, in render
result = block.nodelist.render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 844, in render
bit = self.render_node(node, context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 858, in render_node
return node.render(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 898, in render
output = self.filter_expression.resolve(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 596, in resolve
obj = self.var.resolve(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 734, in resolve
value = self._resolve_lookup(context)
File "/srv/django/site/env/lib/python2.7/site-packages/django/template/base.py", line 770, in _resolve_lookup
current = getattr(current, bit)
File "/srv/django/site/env/lib/python2.7/site-packages/django/db/models/fields/files.py", line 74, in _get_size
return self.storage.size(self.name)
File "/srv/django/site/env/lib/python2.7/site-packages/django/core/files/storage.py", line 285, in size
return os.path.getsize(self.path(name))
File "/srv/django/site/env/lib/python2.7/genericpath.py", line 49, in getsize
return os.stat(filename).st_size
OSError: [Errno 2] No such file or directory: '/srv/django/site/media/original_images/img-17686908.jpg'
|
OSError
|
def create_project(parser, options, args):
# Validate args
if len(args) < 2:
parser.error("Please specify a name for your wagtail installation")
elif len(args) > 2:
parser.error("Too many arguments")
project_name = args[1]
# Make sure given name is not already in use by another python package/module.
try:
__import__(project_name)
except ImportError:
pass
else:
parser.error(
"'%s' conflicts with the name of an existing "
"Python module and cannot be used as a project "
"name. Please try another name." % project_name
)
# Make sure directory does not already exist
if os.path.exists(project_name):
print(
"A directory called %(project_name)s already exists. \
Please choose another name for your wagtail project or remove the existing directory."
% {"project_name": project_name}
)
sys.exit(errno.EEXIST)
print(
"Creating a wagtail project called %(project_name)s"
% {"project_name": project_name}
)
# Create the project from the wagtail template using startapp
# First find the path to wagtail
import wagtail
wagtail_path = os.path.dirname(wagtail.__file__)
template_path = os.path.join(wagtail_path, "project_template")
# Call django-admin startproject
utility = ManagementUtility(
[
"django-admin.py",
"startproject",
"--template=" + template_path,
"--name=Vagrantfile",
"--ext=html,rst",
project_name,
]
)
utility.execute()
print("Success! %(project_name)s is created" % {"project_name": project_name})
|
def create_project(parser, options, args):
# Validate args
if len(args) < 2:
parser.error("Please specify a name for your wagtail installation")
elif len(args) > 2:
parser.error("Too many arguments")
project_name = args[1]
# Make sure given name is not already in use by another python package/module.
try:
__import__(project_name)
except ImportError:
pass
else:
parser.error(
"'%s' conflicts with the name of an existing "
"Python module and cannot be used as a project "
"name. Please try another name." % project_name
)
# Make sure directory does not already exist
if os.path.exists(project_name):
print(
"A directory called %(project_name)s already exists. \
Please choose another name for your wagtail project or remove the existing directory."
% {"project_name": project_name}
)
sys.exit(errno.EEXIST)
print(
"Creating a wagtail project called %(project_name)s"
% {"project_name": project_name}
)
# Create the project from the wagtail template using startapp
# First find the path to wagtail
import wagtail
wagtail_path = os.path.dirname(wagtail.__file__)
template_path = os.path.join(wagtail_path, "project_template")
# Call django-admin startproject
result = subprocess.call(
[
"django-admin.py",
"startproject",
"--template=" + template_path,
"--name=Vagrantfile",
"--ext=html,rst",
project_name,
]
)
if result == 0:
print("Success! %(project_name)s is created" % {"project_name": project_name})
|
https://github.com/wagtail/wagtail/issues/625
|
$ wagtail start wagtailtest
Creating a wagtail project called wagtailtest
Traceback (most recent call last):
File "d:\VirtualEnvs\wagtail_env\Scripts\wagtail-script.py", line 9, in <module>
load_entry_point('wagtail==0.6', 'console_scripts', 'wagtail')()
File "d:\VirtualEnvs\wagtail_env\lib\site-packages\wagtail\bin\wagtail.py", line 75, in main
COMMANDS[command](parser, options, args)
File "d:\VirtualEnvs\wagtail_env\lib\site-packages\wagtail\bin\wagtail.py", line 51, in create_project
project_name
File "C:\Python27\Lib\subprocess.py", line 522, in call
return Popen(*popenargs, **kwargs).wait()
File "C:\Python27\Lib\subprocess.py", line 710, in __init__
errread, errwrite)
File "C:\Python27\Lib\subprocess.py", line 958, in _execute_child
startupinfo)
WindowsError: [Error 193] %1 is not a valid Win32 application
|
WindowsError
|
def serialize(self, form):
data = {}
for key in form.inputs.keys():
input = form.inputs[key]
if getattr(input, "type", "") == "submit":
try:
form.remove(input)
# Issue 595: throws ValueError: Element not child of this node
except ValueError:
pass
for k, v in form.fields.items():
if v is None:
continue
if isinstance(v, lxml.html.MultipleSelectOptions):
data[k] = [val for val in v]
else:
data[k] = v
for key in form.inputs.keys():
input = form.inputs[key]
if getattr(input, "type", "") == "file" and key in data:
data[key] = open(data[key], "rb")
return data
|
def serialize(self, form):
data = {}
for key in form.inputs.keys():
input = form.inputs[key]
if getattr(input, "type", "") == "submit":
form.remove(input)
for k, v in form.fields.items():
if v is None:
continue
if isinstance(v, lxml.html.MultipleSelectOptions):
data[k] = [val for val in v]
else:
data[k] = v
for key in form.inputs.keys():
input = form.inputs[key]
if getattr(input, "type", "") == "file" and key in data:
data[key] = open(data[key], "rb")
return data
|
https://github.com/cobrateam/splinter/issues/595
|
Traceback (most recent call last):
File "/home/suresh/appt/myproject/appt/tests/test_blackout2.py", line 96, in test_delete
browser.find_by_name('login').click()
File "/home/suresh/appt/local/lib/python3.5/site-packages/splinter/driver/lxmldriver.py", line 397, in click
return self.parent.submit_data(parent_form)
File "/home/suresh/appt/local/lib/python3.5/site-packages/splinter/driver/djangoclient.py", line 121, in submit_data
return super(DjangoClient, self).submit(form).content
File "/home/suresh/appt/local/lib/python3.5/site-packages/splinter/driver/lxmldriver.py", line 74, in submit
data = self.serialize(form)
File "/home/suresh/appt/local/lib/python3.5/site-packages/splinter/driver/lxmldriver.py", line 48, in serialize
form.remove(input)
File "src/lxml/lxml.etree.pyx", line 950, in lxml.etree._Element.remove (src/lxml/lxml.etree.c:50799)
ValueError: Element is not a child of this node.
|
ValueError
|
def connect(self, url):
if not (url.startswith("file:") or url.startswith("about:")):
self.request_url = url
self._create_connection()
self._store_response()
self.conn.close()
else:
self.status_code = StatusCode(200, "Ok")
|
def connect(self, url):
if not url.startswith("file:"):
self.request_url = url
self._create_connection()
self._store_response()
self.conn.close()
else:
self.status_code = StatusCode(200, "Ok")
|
https://github.com/cobrateam/splinter/issues/233
|
from splinter import Browser
browser = Browser()
browser.visit('about:blank')
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "/Users/lorin/.virtualenvs/myvenv/lib/python2.7/site-packages/splinter/dr
iver/webdriver/__init__.py", line 44, in visit
self.connect(url)
File "/Users/lorin/.virtualenvs/myvenv/lib/python2.7/site-packages/splinter/re
quest_handler/request_handler.py", line 18, in connect
self._create_connection()
File "/Users/lorin/.virtualenvs/myvenv/lib/python2.7/site-packages/splinter/re
quest_handler/request_handler.py", line 43, in _create_connection
self.conn = httplib.HTTPConnection(self.host, self.port)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/h
ttplib.py", line 693, in __init__
self._set_hostport(host, port)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/h
ttplib.py", line 712, in _set_hostport
i = host.rfind(':')
AttributeError: 'NoneType' object has no attribute 'rfind'
|
AttributeError
|
def expect_compound_columns_to_be_unique(
self,
column_list,
ignore_row_if="all_values_are_missing",
result_format=None,
row_condition=None,
condition_parser=None,
include_config=True,
catch_exceptions=None,
meta=None,
):
columns = [
sa.column(col["name"]) for col in self.columns if col["name"] in column_list
]
query = (
sa.select([sa.func.count()])
.group_by(*columns)
.having(sa.func.count() > 1)
.select_from(self._table)
)
if ignore_row_if == "all_values_are_missing":
query = query.where(sa.and_(*[col != None for col in columns]))
elif ignore_row_if == "any_value_is_missing":
query = query.where(sa.or_(*[col != None for col in columns]))
elif ignore_row_if == "never":
pass
else:
raise ValueError(
"ignore_row_if was set to an unexpected value: %s" % ignore_row_if
)
unexpected_count = self.engine.execute(query).fetchone()
if unexpected_count is None:
# This can happen when the condition filters out all rows
unexpected_count = 0
else:
unexpected_count = unexpected_count[0]
total_count_query = sa.select([sa.func.count()]).select_from(self._table)
total_count = self.engine.execute(total_count_query).fetchone()[0]
if total_count > 0:
unexpected_percent = 100.0 * unexpected_count / total_count
else:
# If no rows, then zero percent are unexpected.
unexpected_percent = 0
return {
"success": unexpected_count == 0,
"result": {"unexpected_percent": unexpected_percent},
}
|
def expect_compound_columns_to_be_unique(
self,
column_list,
ignore_row_if="all_values_are_missing",
result_format=None,
row_condition=None,
condition_parser=None,
include_config=True,
catch_exceptions=None,
meta=None,
):
columns = [
sa.column(col["name"]) for col in self.columns if col["name"] in column_list
]
query = (
sa.select([sa.func.count()])
.group_by(*columns)
.having(sa.func.count() > 1)
.select_from(self._table)
)
if ignore_row_if == "all_values_are_missing":
query = query.where(sa.and_(*[col != None for col in columns]))
elif ignore_row_if == "any_value_is_missing":
query = query.where(sa.or_(*[col != None for col in columns]))
elif ignore_row_if == "never":
pass
else:
raise ValueError(
"ignore_row_if was set to an unexpected value: %s" % ignore_row_if
)
unexpected_count = self.engine.execute(query).fetchone()
if unexpected_count is None:
# This can happen when the condition filters out all rows
unexpected_count = 0
else:
unexpected_count = unexpected_count[0]
total_count_query = sa.select([sa.func.count()]).select_from(self._table)
total_count = self.engine.execute(total_count_query).fetchone()[0]
return {
"success": unexpected_count == 0,
"result": {"unexpected_percent": 100.0 * unexpected_count / total_count},
}
|
https://github.com/great-expectations/great_expectations/issues/2451
|
float division by zero
Traceback (most recent call last):
...
File "py_deps/pypi__great_expectations/great_expectations/data_asset/util.py", line 80, in f
return self.mthd(obj, *args, **kwargs)
File "py_deps/pypi__great_expectations/great_expectations/data_asset/data_asset.py", line 275, in wrapper
raise err
File "py_deps/pypi__great_expectations/great_expectations/data_asset/data_asset.py", line 260, in wrapper
return_obj = func(self, **evaluation_args)
File "py_deps/pypi__great_expectations/great_expectations/dataset/sqlalchemy_dataset.py", line 1380, in expect_compound_columns_to_be_unique
"result": {"unexpected_percent": 100.0 * unexpected_count / total_count}
ZeroDivisionError:
|
ZeroDivisionError
|
def __init__(
self,
table_name,
key_columns,
fixed_length_key=True,
credentials=None,
url=None,
connection_string=None,
engine=None,
store_name=None,
suppress_store_backend_id=False,
manually_initialize_store_backend_id: str = "",
**kwargs,
):
super().__init__(
fixed_length_key=fixed_length_key,
suppress_store_backend_id=suppress_store_backend_id,
manually_initialize_store_backend_id=manually_initialize_store_backend_id,
store_name=store_name,
)
if not sa:
raise ge_exceptions.DataContextError(
"ModuleNotFoundError: No module named 'sqlalchemy'"
)
if not self.fixed_length_key:
raise ge_exceptions.InvalidConfigError(
"DatabaseStoreBackend requires use of a fixed-length-key"
)
self._schema_name = None
self._credentials = credentials
self._connection_string = connection_string
self._url = url
if engine is not None:
if credentials is not None:
logger.warning(
"Both credentials and engine were provided during initialization of SqlAlchemyExecutionEngine. "
"Ignoring credentials."
)
self.engine = engine
elif credentials is not None:
self.engine = self._build_engine(credentials=credentials, **kwargs)
elif connection_string is not None:
self.engine = sa.create_engine(connection_string, **kwargs)
elif url is not None:
self.drivername = urlparse(url).scheme
self.engine = sa.create_engine(url, **kwargs)
else:
raise ge_exceptions.InvalidConfigError(
"Credentials, url, connection_string, or an engine are required for a DatabaseStoreBackend."
)
meta = MetaData(schema=self._schema_name)
self.key_columns = key_columns
# Dynamically construct a SQLAlchemy table with the name and column names we'll use
cols = []
for column in key_columns:
if column == "value":
raise ge_exceptions.InvalidConfigError(
"'value' cannot be used as a key_element name"
)
cols.append(Column(column, String, primary_key=True))
cols.append(Column("value", String))
try:
table = Table(table_name, meta, autoload=True, autoload_with=self.engine)
# We do a "light" check: if the columns' names match, we will proceed, otherwise, create the table
if {str(col.name).lower() for col in table.columns} != (
set(key_columns) | {"value"}
):
raise ge_exceptions.StoreBackendError(
f"Unable to use table {table_name}: it exists, but does not have the expected schema."
)
except NoSuchTableError:
table = Table(table_name, meta, *cols)
try:
if self._schema_name:
self.engine.execute(f"CREATE SCHEMA IF NOT EXISTS {self._schema_name};")
meta.create_all(self.engine)
except SQLAlchemyError as e:
raise ge_exceptions.StoreBackendError(
f"Unable to connect to table {table_name} because of an error. It is possible your table needs to be migrated to a new schema. SqlAlchemyError: {str(e)}"
)
self._table = table
# Initialize with store_backend_id
self._store_backend_id = None
self._store_backend_id = self.store_backend_id
|
def __init__(
self,
table_name,
key_columns,
fixed_length_key=True,
credentials=None,
url=None,
connection_string=None,
engine=None,
store_name=None,
suppress_store_backend_id=False,
**kwargs,
):
super().__init__(
fixed_length_key=fixed_length_key,
suppress_store_backend_id=suppress_store_backend_id,
store_name=store_name,
)
if not sa:
raise ge_exceptions.DataContextError(
"ModuleNotFoundError: No module named 'sqlalchemy'"
)
if not self.fixed_length_key:
raise ge_exceptions.InvalidConfigError(
"DatabaseStoreBackend requires use of a fixed-length-key"
)
self._schema_name = None
self._credentials = credentials
self._connection_string = connection_string
self._url = url
if engine is not None:
if credentials is not None:
logger.warning(
"Both credentials and engine were provided during initialization of SqlAlchemyExecutionEngine. "
"Ignoring credentials."
)
self.engine = engine
elif credentials is not None:
self.engine = self._build_engine(credentials=credentials, **kwargs)
elif connection_string is not None:
self.engine = sa.create_engine(connection_string, **kwargs)
elif url is not None:
self.drivername = urlparse(url).scheme
self.engine = sa.create_engine(url, **kwargs)
else:
raise ge_exceptions.InvalidConfigError(
"Credentials, url, connection_string, or an engine are required for a DatabaseStoreBackend."
)
meta = MetaData(schema=self._schema_name)
self.key_columns = key_columns
# Dynamically construct a SQLAlchemy table with the name and column names we'll use
cols = []
for column in key_columns:
if column == "value":
raise ge_exceptions.InvalidConfigError(
"'value' cannot be used as a key_element name"
)
cols.append(Column(column, String, primary_key=True))
cols.append(Column("value", String))
try:
table = Table(table_name, meta, autoload=True, autoload_with=self.engine)
# We do a "light" check: if the columns' names match, we will proceed, otherwise, create the table
if {str(col.name).lower() for col in table.columns} != (
set(key_columns) | {"value"}
):
raise ge_exceptions.StoreBackendError(
f"Unable to use table {table_name}: it exists, but does not have the expected schema."
)
except NoSuchTableError:
table = Table(table_name, meta, *cols)
try:
if self._schema_name:
self.engine.execute(f"CREATE SCHEMA IF NOT EXISTS {self._schema_name};")
meta.create_all(self.engine)
except SQLAlchemyError as e:
raise ge_exceptions.StoreBackendError(
f"Unable to connect to table {table_name} because of an error. It is possible your table needs to be migrated to a new schema. SqlAlchemyError: {str(e)}"
)
self._table = table
# Initialize with store_backend_id
self._store_backend_id = None
self._store_backend_id = self.store_backend_id
|
https://github.com/great-expectations/great_expectations/issues/2181
|
➜ great_expectations store list [130 01:40:51P Wed 12/16/20]
Traceback (most recent call last):
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/data_context/util.py", line 84, in instantiate_class_from_config
class_instance = class_(**config_with_defaults)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/data_context/store/database_store_backend.py", line 76, in __init__
self.engine = self._build_engine(credentials=credentials, **kwargs)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/data_context/store/database_store_backend.py", line 164, in _build_engine
engine = sa.create_engine(options, **create_engine_kwargs)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/sqlalchemy/engine/__init__.py", line 500, in create_engine
return strategy.create(*args, **kwargs)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/sqlalchemy/engine/strategies.py", line 164, in create
raise TypeError(
TypeError: Invalid argument(s) 'manually_initialize_store_backend_id' sent to create_engine(), using configuration PGDialect_psycopg2/QueuePool/Engine. Please check that the keyword arguments are appropriate for this combination of components.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/data_context/util.py", line 84, in instantiate_class_from_config
class_instance = class_(**config_with_defaults)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/data_context/store/expectations_store.py", line 133, in __init__
super().__init__(
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/data_context/store/store.py", line 42, in __init__
self._store_backend = instantiate_class_from_config(
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/data_context/util.py", line 86, in instantiate_class_from_config
raise TypeError(
TypeError: Couldn't instantiate class : DatabaseStoreBackend with config :
store_name postgres_expectations_store
credentials {'host': 'localhost', 'port': 5432, 'username': 'great', 'password': 'expectations', 'database': 'ge'}
manually_initialize_store_backend_id 91f04917-fe8e-49b7-bea1-db671ebfeb73
table_name ge_expectations_store
key_columns ['expectation_suite_name']
Invalid argument(s) 'manually_initialize_store_backend_id' sent to create_engine(), using configuration PGDialect_psycopg2/QueuePool/Engine. Please check that the keyword arguments are appropriate for this combination of components.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/ryan/.virtualenvs/ge-13/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/cli/cli.py", line 72, in main
cli()
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/cli/store.py", line 23, in store_list
context = toolkit.load_data_context_with_error_handling(directory)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/cli/toolkit.py", line 404, in load_data_context_with_error_handling
context = DataContext(directory)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 3106, in __init__
super().__init__(project_config, context_root_directory, runtime_environment)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 261, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 267, in __init__
self._init_stores(self._project_config_with_variables_substituted.stores)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 354, in _init_stores
self._build_store_from_config(store_name, store_config)
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 324, in _build_store_from_config
new_store = instantiate_class_from_config(
File "/Users/ryan/.virtualenvs/ge-13/lib/python3.8/site-packages/great_expectations/data_context/util.py", line 86, in instantiate_class_from_config
raise TypeError(
TypeError: Couldn't instantiate class : ExpectationsStore with config :
store_name postgres_expectations_store
store_backend {'class_name': 'DatabaseStoreBackend', 'credentials': {'drivername': 'postgresql', 'host': 'localhost', 'port': 5432, 'username': 'great', 'password': 'expectations', 'database': 'ge'}, 'manually_initialize_store_backend_id': '91f04917-fe8e-49b7-bea1-db671ebfeb73', 'table_name': 'ge_expectations_store', 'key_columns': ['expectation_suite_name']}
runtime_environment {'root_directory': '/Users/ryan/local/scratch/great-expectations/ge13/great_expectations'}
Couldn't instantiate class : DatabaseStoreBackend with config :
store_name postgres_expectations_store
credentials {'host': 'localhost', 'port': 5432, 'username': 'great', 'password': 'expectations', 'database': 'ge'}
manually_initialize_store_backend_id 91f04917-fe8e-49b7-bea1-db671ebfeb73
table_name ge_expectations_store
key_columns ['expectation_suite_name']
Invalid argument(s) 'manually_initialize_store_backend_id' sent to create_engine(), using configuration PGDialect_psycopg2/QueuePool/Engine. Please check that the keyword arguments are appropriate for this combination of components.
|
TypeError
|
def convert_to_json_serializable(data):
"""
Helper function to convert an object to one that is json serializable
Args:
data: an object to attempt to convert a corresponding json-serializable object
Returns:
(dict) A converted test_object
Warning:
test_obj may also be converted in place.
"""
import datetime
import decimal
import sys
import numpy as np
import pandas as pd
# If it's one of our types, we use our own conversion; this can move to full schema
# once nesting goes all the way down
if isinstance(
data,
(
ExpectationConfiguration,
ExpectationSuite,
ExpectationValidationResult,
ExpectationSuiteValidationResult,
RunIdentifier,
),
):
return data.to_json_dict()
try:
if not isinstance(data, list) and pd.isna(data):
# pd.isna is functionally vectorized, but we only want to apply this to single objects
# Hence, why we test for `not isinstance(list))`
return None
except TypeError:
pass
except ValueError:
pass
if isinstance(data, (str, int, float, bool)):
# No problem to encode json
return data
elif isinstance(data, dict):
new_dict = {}
for key in data:
# A pandas index can be numeric, and a dict key can be numeric, but a json key must be a string
new_dict[str(key)] = convert_to_json_serializable(data[key])
return new_dict
elif isinstance(data, (list, tuple, set)):
new_list = []
for val in data:
new_list.append(convert_to_json_serializable(val))
return new_list
elif isinstance(data, (np.ndarray, pd.Index)):
# test_obj[key] = test_obj[key].tolist()
# If we have an array or index, convert it first to a list--causing coercion to float--and then round
# to the number of digits for which the string representation will equal the float representation
return [convert_to_json_serializable(x) for x in data.tolist()]
# Note: This clause has to come after checking for np.ndarray or we get:
# `ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()`
elif data is None:
# No problem to encode json
return data
elif isinstance(data, (datetime.datetime, datetime.date)):
return data.isoformat()
# Use built in base type from numpy, https://docs.scipy.org/doc/numpy-1.13.0/user/basics.types.html
# https://github.com/numpy/numpy/pull/9505
elif np.issubdtype(type(data), np.bool_):
return bool(data)
elif np.issubdtype(type(data), np.integer) or np.issubdtype(type(data), np.uint):
return int(data)
elif np.issubdtype(type(data), np.floating):
# Note: Use np.floating to avoid FutureWarning from numpy
return float(round(data, sys.float_info.dig))
elif isinstance(data, pd.Series):
# Converting a series is tricky since the index may not be a string, but all json
# keys must be strings. So, we use a very ugly serialization strategy
index_name = data.index.name or "index"
value_name = data.name or "value"
return [
{
index_name: convert_to_json_serializable(idx),
value_name: convert_to_json_serializable(val),
}
for idx, val in data.iteritems()
]
elif isinstance(data, pd.DataFrame):
return convert_to_json_serializable(data.to_dict(orient="records"))
elif isinstance(data, decimal.Decimal):
if not (-1e-55 < decimal.Decimal.from_float(float(data)) - data < 1e-55):
logger.warning(
"Using lossy conversion for decimal %s to float object to support serialization."
% str(data)
)
return float(data)
else:
raise TypeError(
"%s is of type %s which cannot be serialized."
% (str(data), type(data).__name__)
)
|
def convert_to_json_serializable(data):
"""
Helper function to convert an object to one that is json serializable
Args:
data: an object to attempt to convert a corresponding json-serializable object
Returns:
(dict) A converted test_object
Warning:
test_obj may also be converted in place.
"""
import datetime
import decimal
import sys
import numpy as np
import pandas as pd
# If it's one of our types, we use our own conversion; this can move to full schema
# once nesting goes all the way down
if isinstance(
data,
(
ExpectationConfiguration,
ExpectationSuite,
ExpectationValidationResult,
ExpectationSuiteValidationResult,
RunIdentifier,
),
):
return data.to_json_dict()
try:
if not isinstance(data, list) and np.isnan(data):
# np.isnan is functionally vectorized, but we only want to apply this to single objects
# Hence, why we test for `not isinstance(list))`
return None
except TypeError:
pass
except ValueError:
pass
if isinstance(data, (str, int, float, bool)):
# No problem to encode json
return data
elif isinstance(data, dict):
new_dict = {}
for key in data:
# A pandas index can be numeric, and a dict key can be numeric, but a json key must be a string
new_dict[str(key)] = convert_to_json_serializable(data[key])
return new_dict
elif isinstance(data, (list, tuple, set)):
new_list = []
for val in data:
new_list.append(convert_to_json_serializable(val))
return new_list
elif isinstance(data, (np.ndarray, pd.Index)):
# test_obj[key] = test_obj[key].tolist()
# If we have an array or index, convert it first to a list--causing coercion to float--and then round
# to the number of digits for which the string representation will equal the float representation
return [convert_to_json_serializable(x) for x in data.tolist()]
# Note: This clause has to come after checking for np.ndarray or we get:
# `ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()`
elif data is None:
# No problem to encode json
return data
elif isinstance(data, (datetime.datetime, datetime.date)):
return data.isoformat()
# Use built in base type from numpy, https://docs.scipy.org/doc/numpy-1.13.0/user/basics.types.html
# https://github.com/numpy/numpy/pull/9505
elif np.issubdtype(type(data), np.bool_):
return bool(data)
elif np.issubdtype(type(data), np.integer) or np.issubdtype(type(data), np.uint):
return int(data)
elif np.issubdtype(type(data), np.floating):
# Note: Use np.floating to avoid FutureWarning from numpy
return float(round(data, sys.float_info.dig))
elif isinstance(data, pd.Series):
# Converting a series is tricky since the index may not be a string, but all json
# keys must be strings. So, we use a very ugly serialization strategy
index_name = data.index.name or "index"
value_name = data.name or "value"
return [
{
index_name: convert_to_json_serializable(idx),
value_name: convert_to_json_serializable(val),
}
for idx, val in data.iteritems()
]
elif isinstance(data, pd.DataFrame):
return convert_to_json_serializable(data.to_dict(orient="records"))
elif isinstance(data, decimal.Decimal):
if not (-1e-55 < decimal.Decimal.from_float(float(data)) - data < 1e-55):
logger.warning(
"Using lossy conversion for decimal %s to float object to support serialization."
% str(data)
)
return float(data)
else:
raise TypeError(
"%s is of type %s which cannot be serialized."
% (str(data), type(data).__name__)
)
|
https://github.com/great-expectations/great_expectations/issues/2029
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "env/lib/python3.8/site-packages/great_expectations/core/__init__.py", line 2000, in __repr__
return json.dumps(self.to_json_dict(), indent=2)
File "env/lib/python3.8/site-packages/great_expectations/core/__init__.py", line 2026, in to_json_dict
myself = expectationValidationResultSchema.dump(self)
File "env/lib/python3.8/site-packages/great_expectations/marshmallow__shade/schema.py", line 556, in dump
processed_obj = self._invoke_dump_processors(
File "env/lib/python3.8/site-packages/great_expectations/marshmallow__shade/schema.py", line 1075, in _invoke_dump_processors
data = self._invoke_processors(
File "env/lib/python3.8/site-packages/great_expectations/marshmallow__shade/schema.py", line 1234, in _invoke_processors
data = processor(data, many=many, **kwargs)
File "env/lib/python3.8/site-packages/great_expectations/core/__init__.py", line 2096, in convert_result_to_serializable
data.result = convert_to_json_serializable(data.result)
File "env/lib/python3.8/site-packages/great_expectations/core/__init__.py", line 132, in convert_to_json_serializable
new_dict[str(key)] = convert_to_json_serializable(data[key])
File "env/lib/python3.8/site-packages/great_expectations/core/__init__.py", line 139, in convert_to_json_serializable
new_list.append(convert_to_json_serializable(val))
File "env/lib/python3.8/site-packages/great_expectations/core/__init__.py", line 195, in convert_to_json_serializable
raise TypeError(
TypeError: <NA> is of type NAType which cannot be serialized.
|
TypeError
|
def ensure_json_serializable(data):
"""
Helper function to convert an object to one that is json serializable
Args:
data: an object to attempt to convert a corresponding json-serializable object
Returns:
(dict) A converted test_object
Warning:
test_obj may also be converted in place.
"""
import datetime
import decimal
import numpy as np
import pandas as pd
# If it's one of our types, we use our own conversion; this can move to full schema
# once nesting goes all the way down
if isinstance(
data,
(
ExpectationConfiguration,
ExpectationSuite,
ExpectationValidationResult,
ExpectationSuiteValidationResult,
RunIdentifier,
),
):
return
try:
if not isinstance(data, list) and pd.isna(data):
# pd.isna is functionally vectorized, but we only want to apply this to single objects
# Hence, why we test for `not isinstance(list))`
return
except TypeError:
pass
except ValueError:
pass
if isinstance(data, (str, int, float, bool)):
# No problem to encode json
return
elif isinstance(data, dict):
for key in data:
str(key) # key must be cast-able to string
ensure_json_serializable(data[key])
return
elif isinstance(data, (list, tuple, set)):
for val in data:
ensure_json_serializable(val)
return
elif isinstance(data, (np.ndarray, pd.Index)):
# test_obj[key] = test_obj[key].tolist()
# If we have an array or index, convert it first to a list--causing coercion to float--and then round
# to the number of digits for which the string representation will equal the float representation
_ = [ensure_json_serializable(x) for x in data.tolist()]
return
# Note: This clause has to come after checking for np.ndarray or we get:
# `ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()`
elif data is None:
# No problem to encode json
return
elif isinstance(data, (datetime.datetime, datetime.date)):
return
# Use built in base type from numpy, https://docs.scipy.org/doc/numpy-1.13.0/user/basics.types.html
# https://github.com/numpy/numpy/pull/9505
elif np.issubdtype(type(data), np.bool_):
return
elif np.issubdtype(type(data), np.integer) or np.issubdtype(type(data), np.uint):
return
elif np.issubdtype(type(data), np.floating):
# Note: Use np.floating to avoid FutureWarning from numpy
return
elif isinstance(data, pd.Series):
# Converting a series is tricky since the index may not be a string, but all json
# keys must be strings. So, we use a very ugly serialization strategy
index_name = data.index.name or "index"
value_name = data.name or "value"
_ = [
{
index_name: ensure_json_serializable(idx),
value_name: ensure_json_serializable(val),
}
for idx, val in data.iteritems()
]
return
elif isinstance(data, pd.DataFrame):
return ensure_json_serializable(data.to_dict(orient="records"))
elif isinstance(data, decimal.Decimal):
return
else:
raise InvalidExpectationConfigurationError(
"%s is of type %s which cannot be serialized to json"
% (str(data), type(data).__name__)
)
|
def ensure_json_serializable(data):
"""
Helper function to convert an object to one that is json serializable
Args:
data: an object to attempt to convert a corresponding json-serializable object
Returns:
(dict) A converted test_object
Warning:
test_obj may also be converted in place.
"""
import datetime
import decimal
import numpy as np
import pandas as pd
# If it's one of our types, we use our own conversion; this can move to full schema
# once nesting goes all the way down
if isinstance(
data,
(
ExpectationConfiguration,
ExpectationSuite,
ExpectationValidationResult,
ExpectationSuiteValidationResult,
RunIdentifier,
),
):
return
try:
if not isinstance(data, list) and np.isnan(data):
# np.isnan is functionally vectorized, but we only want to apply this to single objects
# Hence, why we test for `not isinstance(list))`
return
except TypeError:
pass
except ValueError:
pass
if isinstance(data, (str, int, float, bool)):
# No problem to encode json
return
elif isinstance(data, dict):
for key in data:
str(key) # key must be cast-able to string
ensure_json_serializable(data[key])
return
elif isinstance(data, (list, tuple, set)):
for val in data:
ensure_json_serializable(val)
return
elif isinstance(data, (np.ndarray, pd.Index)):
# test_obj[key] = test_obj[key].tolist()
# If we have an array or index, convert it first to a list--causing coercion to float--and then round
# to the number of digits for which the string representation will equal the float representation
_ = [ensure_json_serializable(x) for x in data.tolist()]
return
# Note: This clause has to come after checking for np.ndarray or we get:
# `ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()`
elif data is None:
# No problem to encode json
return
elif isinstance(data, (datetime.datetime, datetime.date)):
return
# Use built in base type from numpy, https://docs.scipy.org/doc/numpy-1.13.0/user/basics.types.html
# https://github.com/numpy/numpy/pull/9505
elif np.issubdtype(type(data), np.bool_):
return
elif np.issubdtype(type(data), np.integer) or np.issubdtype(type(data), np.uint):
return
elif np.issubdtype(type(data), np.floating):
# Note: Use np.floating to avoid FutureWarning from numpy
return
elif isinstance(data, pd.Series):
# Converting a series is tricky since the index may not be a string, but all json
# keys must be strings. So, we use a very ugly serialization strategy
index_name = data.index.name or "index"
value_name = data.name or "value"
_ = [
{
index_name: ensure_json_serializable(idx),
value_name: ensure_json_serializable(val),
}
for idx, val in data.iteritems()
]
return
elif isinstance(data, pd.DataFrame):
return ensure_json_serializable(data.to_dict(orient="records"))
elif isinstance(data, decimal.Decimal):
return
else:
raise InvalidExpectationConfigurationError(
"%s is of type %s which cannot be serialized to json"
% (str(data), type(data).__name__)
)
|
https://github.com/great-expectations/great_expectations/issues/2029
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "env/lib/python3.8/site-packages/great_expectations/core/__init__.py", line 2000, in __repr__
return json.dumps(self.to_json_dict(), indent=2)
File "env/lib/python3.8/site-packages/great_expectations/core/__init__.py", line 2026, in to_json_dict
myself = expectationValidationResultSchema.dump(self)
File "env/lib/python3.8/site-packages/great_expectations/marshmallow__shade/schema.py", line 556, in dump
processed_obj = self._invoke_dump_processors(
File "env/lib/python3.8/site-packages/great_expectations/marshmallow__shade/schema.py", line 1075, in _invoke_dump_processors
data = self._invoke_processors(
File "env/lib/python3.8/site-packages/great_expectations/marshmallow__shade/schema.py", line 1234, in _invoke_processors
data = processor(data, many=many, **kwargs)
File "env/lib/python3.8/site-packages/great_expectations/core/__init__.py", line 2096, in convert_result_to_serializable
data.result = convert_to_json_serializable(data.result)
File "env/lib/python3.8/site-packages/great_expectations/core/__init__.py", line 132, in convert_to_json_serializable
new_dict[str(key)] = convert_to_json_serializable(data[key])
File "env/lib/python3.8/site-packages/great_expectations/core/__init__.py", line 139, in convert_to_json_serializable
new_list.append(convert_to_json_serializable(val))
File "env/lib/python3.8/site-packages/great_expectations/core/__init__.py", line 195, in convert_to_json_serializable
raise TypeError(
TypeError: <NA> is of type NAType which cannot be serialized.
|
TypeError
|
def remove_key(self, key):
from google.cloud import storage
from google.cloud.exceptions import NotFound
gcs = storage.Client(project=self.project)
bucket = gcs.get_bucket(self.bucket)
try:
bucket.delete_blobs(blobs=list(bucket.list_blobs(prefix=self.prefix)))
except NotFound:
return False
return True
|
def remove_key(self, key):
from google.cloud import storage
from google.cloud.exceptions import NotFound
gcs = storage.Client(project=self.project)
bucket = gcs.get_bucket(self.bucket)
try:
bucket.delete_blobs(blobs=bucket.list_blobs(prefix=self.prefix))
except NotFound:
return False
return True
|
https://github.com/great-expectations/great_expectations/issues/1882
|
jovyan@41da7a95ddee:~/ecp-jupyter-notebooks$ great_expectations checkpoint run parquet_check
Heads up! This feature is Experimental. It may change. Please give us your feedback!
Error running action with name update_data_docs
Traceback (most recent call last):
File "/opt/conda/lib/python3.8/site-packages/great_expectations/validation_operators/validation_operators.py", line 379, in _run_actions
action_result = self.actions[action["name"]].run(
File "/opt/conda/lib/python3.8/site-packages/great_expectations/validation_operators/actions.py", line 46, in run
return self._run(
File "/opt/conda/lib/python3.8/site-packages/great_expectations/validation_operators/actions.py", line 436, in _run
data_docs_index_pages = self.data_context.build_data_docs(
File "/opt/conda/lib/python3.8/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 257, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 1572, in build_data_docs
index_page_resource_identifier_tuple = site_builder.build(
File "/opt/conda/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 292, in build
index_page_resource_identifier_tuple = self.site_index_builder.build()
File "/opt/conda/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 767, in build
self.target_store.store_backends[
File "/opt/conda/lib/python3.8/site-packages/great_expectations/data_context/store/tuple_store_backend.py", line 699, in remove_key
bucket.delete_blobs(blobs=bucket.list_blobs(prefix=self.prefix))
File "/opt/conda/lib/python3.8/site-packages/google/cloud/storage/bucket.py", line 1608, in delete_blobs
len(blobs),
TypeError: object of type 'HTTPIterator' has no len()
Traceback (most recent call last):
File "/opt/conda/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/opt/conda/lib/python3.8/site-packages/great_expectations/cli/cli.py", line 73, in main
cli()
File "/opt/conda/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/opt/conda/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/opt/conda/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/opt/conda/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/opt/conda/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/opt/conda/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/opt/conda/lib/python3.8/site-packages/great_expectations/cli/mark.py", line 28, in wrapper
func(*args, **kwargs)
File "/opt/conda/lib/python3.8/site-packages/great_expectations/cli/checkpoint.py", line 296, in checkpoint_run
results = context.run_validation_operator(
File "/opt/conda/lib/python3.8/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 257, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 1001, in run_validation_operator
return validation_operator.run(
File "/opt/conda/lib/python3.8/site-packages/great_expectations/validation_operators/validation_operators.py", line 328, in run
batch_actions_results = self._run_actions(
File "/opt/conda/lib/python3.8/site-packages/great_expectations/validation_operators/validation_operators.py", line 398, in _run_actions
raise e
File "/opt/conda/lib/python3.8/site-packages/great_expectations/validation_operators/validation_operators.py", line 379, in _run_actions
action_result = self.actions[action["name"]].run(
File "/opt/conda/lib/python3.8/site-packages/great_expectations/validation_operators/actions.py", line 46, in run
return self._run(
File "/opt/conda/lib/python3.8/site-packages/great_expectations/validation_operators/actions.py", line 436, in _run
data_docs_index_pages = self.data_context.build_data_docs(
File "/opt/conda/lib/python3.8/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 257, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 1572, in build_data_docs
index_page_resource_identifier_tuple = site_builder.build(
File "/opt/conda/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 292, in build
index_page_resource_identifier_tuple = self.site_index_builder.build()
File "/opt/conda/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 767, in build
self.target_store.store_backends[
File "/opt/conda/lib/python3.8/site-packages/great_expectations/data_context/store/tuple_store_backend.py", line 699, in remove_key
bucket.delete_blobs(blobs=bucket.list_blobs(prefix=self.prefix))
File "/opt/conda/lib/python3.8/site-packages/google/cloud/storage/bucket.py", line 1608, in delete_blobs
len(blobs),
TypeError: object of type 'HTTPIterator' has no len()
|
TypeError
|
def render(self, validation_results: ExpectationSuiteValidationResult):
run_id = validation_results.meta["run_id"]
if isinstance(run_id, str):
try:
run_time = parse(run_id).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
except (ValueError, TypeError):
run_time = "__none__"
run_name = run_id
elif isinstance(run_id, dict):
run_name = run_id.get("run_name") or "__none__"
run_time = run_id.get("run_time") or "__none__"
elif isinstance(run_id, RunIdentifier):
run_name = run_id.run_name or "__none__"
run_time = run_id.run_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
expectation_suite_name = validation_results.meta["expectation_suite_name"]
batch_kwargs = validation_results.meta.get("batch_kwargs")
# add datasource key to batch_kwargs if missing
if "datasource" not in validation_results.meta.get("batch_kwargs", {}):
# check if expectation_suite_name follows datasource.batch_kwargs_generator.data_asset_name.suite_name pattern
if len(expectation_suite_name.split(".")) == 4:
batch_kwargs["datasource"] = expectation_suite_name.split(".")[0]
# Group EVRs by column
columns = {}
for evr in validation_results.results:
if "column" in evr.expectation_config.kwargs:
column = evr.expectation_config.kwargs["column"]
else:
column = "Table-Level Expectations"
if column not in columns:
columns[column] = []
columns[column].append(evr)
ordered_columns = Renderer._get_column_list_from_evrs(validation_results)
overview_content_blocks = [
self._render_validation_header(validation_results),
self._render_validation_statistics(validation_results=validation_results),
]
collapse_content_blocks = [
self._render_validation_info(validation_results=validation_results)
]
if validation_results["meta"].get("batch_markers"):
collapse_content_blocks.append(
self._render_nested_table_from_dict(
input_dict=validation_results["meta"].get("batch_markers"),
header="Batch Markers",
)
)
if validation_results["meta"].get("batch_kwargs"):
collapse_content_blocks.append(
self._render_nested_table_from_dict(
input_dict=validation_results["meta"].get("batch_kwargs"),
header="Batch Kwargs",
)
)
if validation_results["meta"].get("batch_parameters"):
collapse_content_blocks.append(
self._render_nested_table_from_dict(
input_dict=validation_results["meta"].get("batch_parameters"),
header="Batch Parameters",
)
)
collapse_content_block = CollapseContent(
**{
"collapse_toggle_link": "Show more info...",
"collapse": collapse_content_blocks,
"styling": {
"body": {"classes": ["card", "card-body"]},
"classes": ["col-12", "p-1"],
},
}
)
if not self.run_info_at_end:
overview_content_blocks.append(collapse_content_block)
sections = [
RenderedSectionContent(
**{
"section_name": "Overview",
"content_blocks": overview_content_blocks,
}
)
]
if "Table-Level Expectations" in columns:
sections += [
self._column_section_renderer.render(
validation_results=columns["Table-Level Expectations"]
)
]
sections += [
self._column_section_renderer.render(
validation_results=columns[column],
)
for column in ordered_columns
]
if self.run_info_at_end:
sections += [
RenderedSectionContent(
**{
"section_name": "Run Info",
"content_blocks": collapse_content_blocks,
}
)
]
data_asset_name = batch_kwargs.get("data_asset_name")
# Determine whether we have a custom run_name
try:
run_name_as_time = parse(run_name)
except ValueError:
run_name_as_time = None
try:
run_time_datetime = parse(run_time)
except ValueError:
run_time_datetime = None
include_run_name: bool = False
if run_name_as_time != run_time_datetime and run_name_as_time != "__none__":
include_run_name = True
page_title = "Validations / " + str(expectation_suite_name)
if data_asset_name:
page_title += " / " + str(data_asset_name)
if include_run_name:
page_title += " / " + str(run_name)
page_title += " / " + str(run_time)
return RenderedDocumentContent(
**{
"renderer_type": "ValidationResultsPageRenderer",
"page_title": page_title,
"batch_kwargs": batch_kwargs,
"expectation_suite_name": expectation_suite_name,
"sections": sections,
"utm_medium": "validation-results-page",
}
)
|
def render(self, validation_results: ExpectationSuiteValidationResult):
run_id = validation_results.meta["run_id"]
if isinstance(run_id, str):
try:
run_time = parse(run_id).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
except (ValueError, TypeError):
run_time = "__none__"
run_name = run_id
elif isinstance(run_id, dict):
run_name = run_id.get("run_name") or "__none__"
run_time = run_id.get("run_time") or "__none__"
elif isinstance(run_id, RunIdentifier):
run_name = run_id.run_name or "__none__"
run_time = run_id.run_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
expectation_suite_name = validation_results.meta["expectation_suite_name"]
batch_kwargs = validation_results.meta.get("batch_kwargs")
# add datasource key to batch_kwargs if missing
if "datasource" not in validation_results.meta.get("batch_kwargs", {}):
# check if expectation_suite_name follows datasource.batch_kwargs_generator.data_asset_name.suite_name pattern
if len(expectation_suite_name.split(".")) == 4:
batch_kwargs["datasource"] = expectation_suite_name.split(".")[0]
# Group EVRs by column
columns = {}
for evr in validation_results.results:
if "column" in evr.expectation_config.kwargs:
column = evr.expectation_config.kwargs["column"]
else:
column = "Table-Level Expectations"
if column not in columns:
columns[column] = []
columns[column].append(evr)
ordered_columns = Renderer._get_column_list_from_evrs(validation_results)
overview_content_blocks = [
self._render_validation_header(validation_results),
self._render_validation_statistics(validation_results=validation_results),
]
collapse_content_blocks = [
self._render_validation_info(validation_results=validation_results)
]
if validation_results["meta"].get("batch_markers"):
collapse_content_blocks.append(
self._render_nested_table_from_dict(
input_dict=validation_results["meta"].get("batch_markers"),
header="Batch Markers",
)
)
if validation_results["meta"].get("batch_kwargs"):
collapse_content_blocks.append(
self._render_nested_table_from_dict(
input_dict=validation_results["meta"].get("batch_kwargs"),
header="Batch Kwargs",
)
)
if validation_results["meta"].get("batch_parameters"):
collapse_content_blocks.append(
self._render_nested_table_from_dict(
input_dict=validation_results["meta"].get("batch_parameters"),
header="Batch Parameters",
)
)
collapse_content_block = CollapseContent(
**{
"collapse_toggle_link": "Show more info...",
"collapse": collapse_content_blocks,
"styling": {
"body": {"classes": ["card", "card-body"]},
"classes": ["col-12", "p-1"],
},
}
)
if not self.run_info_at_end:
overview_content_blocks.append(collapse_content_block)
sections = [
RenderedSectionContent(
**{
"section_name": "Overview",
"content_blocks": overview_content_blocks,
}
)
]
if "Table-Level Expectations" in columns:
sections += [
self._column_section_renderer.render(
validation_results=columns["Table-Level Expectations"]
)
]
sections += [
self._column_section_renderer.render(
validation_results=columns[column],
)
for column in ordered_columns
]
if self.run_info_at_end:
sections += [
RenderedSectionContent(
**{
"section_name": "Run Info",
"content_blocks": collapse_content_blocks,
}
)
]
data_asset_name = batch_kwargs.get("data_asset_name")
# Determine whether we have a custom run_name
try:
run_name_as_time = parse(run_name)
except ValueError:
run_name_as_time = None
try:
run_time_datetime = parse(run_time)
except ValueError:
run_time_datetime = None
include_run_name: bool = False
if run_name_as_time != run_time_datetime and run_name_as_time != "__none__":
include_run_name = True
page_title = "Validations / " + expectation_suite_name
if data_asset_name:
page_title += " / " + data_asset_name
if include_run_name:
page_title += " / " + run_name
page_title += " / " + run_time
return RenderedDocumentContent(
**{
"renderer_type": "ValidationResultsPageRenderer",
"page_title": page_title,
"batch_kwargs": batch_kwargs,
"expectation_suite_name": expectation_suite_name,
"sections": sections,
"utm_medium": "validation-results-page",
}
)
|
https://github.com/great-expectations/great_expectations/issues/1913
|
❯ great_expectations docs build
The following Data Docs sites will be built:
- local_site: file:///Users/taylor/repos/sdap/ge/great_expectations/uncommitted/data_docs/local_site/index.html
Would you like to proceed? [Y/n]:
Building Data Docs...
--- Logging error ---
Traceback (most recent call last):
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 443, in build
rendered_content = self.renderer_class.render(resource)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/page_renderer.py", line 210, in render
page_title += " / " + data_asset_name
TypeError: can only concatenate str (not "int") to str
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 1081, in emit
msg = self.format(record)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 925, in format
return fmt.format(record)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 664, in format
record.message = record.getMessage()
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 369, in getMessage
msg = msg % self.args
TypeError: not all arguments converted during string formatting
Call stack:
File "/Users/taylor/repos/sdap/.venv/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/cli.py", line 73, in main
cli()
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 45, in docs_build
build_docs(context, site_name=site_name, view=view, assume_yes=assume_yes)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 146, in build_docs
index_page_locator_infos = context.build_data_docs(site_names=site_names)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 257, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 1572, in build_data_docs
index_page_resource_identifier_tuple = site_builder.build(
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 290, in build
site_section_builder.build(resource_identifiers=resource_identifiers)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 460, in build
logger.error(exception_message, e, exc_info=True)
Message: 'An unexpected Exception occurred during data docs rendering. Because of this error, certain parts of data docs will not be rendered properly and/or may not appear altogether. Please use the trace, included in this message, to diagnose and repair the underlying issue. Detailed information follows:\n TypeError: "can only concatenate str (not "int") to str". Traceback: "Traceback (most recent call last):\n File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 443, in build\n rendered_content = self.renderer_class.render(resource)\n File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/page_renderer.py", line 210, in render\n page_title += " / " + data_asset_name\nTypeError: can only concatenate str (not "int") to str\n".'
Arguments: (TypeError('can only concatenate str (not "int") to str'),)
Traceback (most recent call last):
File "/Users/taylor/repos/sdap/.venv/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/cli.py", line 73, in main
cli()
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 45, in docs_build
build_docs(context, site_name=site_name, view=view, assume_yes=assume_yes)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 146, in build_docs
index_page_locator_infos = context.build_data_docs(site_names=site_names)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 257, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 1572, in build_data_docs
index_page_resource_identifier_tuple = site_builder.build(
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 290, in build
site_section_builder.build(resource_identifiers=resource_identifiers)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 466, in build
viewable_content,
UnboundLocalError: local variable 'viewable_content' referenced before assignment
|
TypeError
|
def _render_validation_header(cls, validation_results):
success = validation_results.success
expectation_suite_name = validation_results.meta["expectation_suite_name"]
expectation_suite_path_components = (
[".." for _ in range(len(expectation_suite_name.split(".")) + 3)]
+ ["expectations"]
+ str(expectation_suite_name).split(".")
)
expectation_suite_path = os.path.join(*expectation_suite_path_components) + ".html"
if success:
success = "Succeeded"
html_success_icon = (
'<i class="fas fa-check-circle text-success" aria-hidden="true"></i>'
)
else:
success = "Failed"
html_success_icon = (
'<i class="fas fa-times text-danger" aria-hidden="true"></i>'
)
return RenderedHeaderContent(
**{
"content_block_type": "header",
"header": RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "Overview",
"tag": "h5",
"styling": {"classes": ["m-0"]},
},
}
),
"subheader": RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "${suite_title} ${expectation_suite_name}\n${status_title} ${html_success_icon} ${success}",
"params": {
"suite_title": "Expectation Suite:",
"status_title": "Status:",
"expectation_suite_name": expectation_suite_name,
"success": success,
"html_success_icon": html_success_icon,
},
"styling": {
"params": {
"suite_title": {"classes": ["h6"]},
"status_title": {"classes": ["h6"]},
"expectation_suite_name": {
"tag": "a",
"attributes": {"href": expectation_suite_path},
},
},
"classes": ["mb-0", "mt-1"],
},
},
}
),
"styling": {
"classes": ["col-12", "p-0"],
"header": {"classes": ["alert", "alert-secondary"]},
},
}
)
|
def _render_validation_header(cls, validation_results):
success = validation_results.success
expectation_suite_name = validation_results.meta["expectation_suite_name"]
expectation_suite_path_components = (
[".." for _ in range(len(expectation_suite_name.split(".")) + 3)]
+ ["expectations"]
+ expectation_suite_name.split(".")
)
expectation_suite_path = os.path.join(*expectation_suite_path_components) + ".html"
if success:
success = "Succeeded"
html_success_icon = (
'<i class="fas fa-check-circle text-success" aria-hidden="true"></i>'
)
else:
success = "Failed"
html_success_icon = (
'<i class="fas fa-times text-danger" aria-hidden="true"></i>'
)
return RenderedHeaderContent(
**{
"content_block_type": "header",
"header": RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "Overview",
"tag": "h5",
"styling": {"classes": ["m-0"]},
},
}
),
"subheader": RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "${suite_title} ${expectation_suite_name}\n${status_title} ${html_success_icon} ${success}",
"params": {
"suite_title": "Expectation Suite:",
"status_title": "Status:",
"expectation_suite_name": expectation_suite_name,
"success": success,
"html_success_icon": html_success_icon,
},
"styling": {
"params": {
"suite_title": {"classes": ["h6"]},
"status_title": {"classes": ["h6"]},
"expectation_suite_name": {
"tag": "a",
"attributes": {"href": expectation_suite_path},
},
},
"classes": ["mb-0", "mt-1"],
},
},
}
),
"styling": {
"classes": ["col-12", "p-0"],
"header": {"classes": ["alert", "alert-secondary"]},
},
}
)
|
https://github.com/great-expectations/great_expectations/issues/1913
|
❯ great_expectations docs build
The following Data Docs sites will be built:
- local_site: file:///Users/taylor/repos/sdap/ge/great_expectations/uncommitted/data_docs/local_site/index.html
Would you like to proceed? [Y/n]:
Building Data Docs...
--- Logging error ---
Traceback (most recent call last):
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 443, in build
rendered_content = self.renderer_class.render(resource)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/page_renderer.py", line 210, in render
page_title += " / " + data_asset_name
TypeError: can only concatenate str (not "int") to str
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 1081, in emit
msg = self.format(record)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 925, in format
return fmt.format(record)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 664, in format
record.message = record.getMessage()
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 369, in getMessage
msg = msg % self.args
TypeError: not all arguments converted during string formatting
Call stack:
File "/Users/taylor/repos/sdap/.venv/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/cli.py", line 73, in main
cli()
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 45, in docs_build
build_docs(context, site_name=site_name, view=view, assume_yes=assume_yes)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 146, in build_docs
index_page_locator_infos = context.build_data_docs(site_names=site_names)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 257, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 1572, in build_data_docs
index_page_resource_identifier_tuple = site_builder.build(
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 290, in build
site_section_builder.build(resource_identifiers=resource_identifiers)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 460, in build
logger.error(exception_message, e, exc_info=True)
Message: 'An unexpected Exception occurred during data docs rendering. Because of this error, certain parts of data docs will not be rendered properly and/or may not appear altogether. Please use the trace, included in this message, to diagnose and repair the underlying issue. Detailed information follows:\n TypeError: "can only concatenate str (not "int") to str". Traceback: "Traceback (most recent call last):\n File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 443, in build\n rendered_content = self.renderer_class.render(resource)\n File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/page_renderer.py", line 210, in render\n page_title += " / " + data_asset_name\nTypeError: can only concatenate str (not "int") to str\n".'
Arguments: (TypeError('can only concatenate str (not "int") to str'),)
Traceback (most recent call last):
File "/Users/taylor/repos/sdap/.venv/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/cli.py", line 73, in main
cli()
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 45, in docs_build
build_docs(context, site_name=site_name, view=view, assume_yes=assume_yes)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 146, in build_docs
index_page_locator_infos = context.build_data_docs(site_names=site_names)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 257, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 1572, in build_data_docs
index_page_resource_identifier_tuple = site_builder.build(
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 290, in build
site_section_builder.build(resource_identifiers=resource_identifiers)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 466, in build
viewable_content,
UnboundLocalError: local variable 'viewable_content' referenced before assignment
|
TypeError
|
def render(self, expectations):
columns, ordered_columns = self._group_and_order_expectations_by_column(
expectations
)
expectation_suite_name = expectations.expectation_suite_name
overview_content_blocks = [
self._render_expectation_suite_header(),
self._render_expectation_suite_info(expectations),
]
table_level_expectations_content_block = self._render_table_level_expectations(
columns
)
if table_level_expectations_content_block is not None:
overview_content_blocks.append(table_level_expectations_content_block)
asset_notes_content_block = self._render_expectation_suite_notes(expectations)
if asset_notes_content_block is not None:
overview_content_blocks.append(asset_notes_content_block)
sections = [
RenderedSectionContent(
**{
"section_name": "Overview",
"content_blocks": overview_content_blocks,
}
)
]
sections += [
self._column_section_renderer.render(expectations=columns[column])
for column in ordered_columns
if column != "_nocolumn"
]
return RenderedDocumentContent(
**{
"renderer_type": "ExpectationSuitePageRenderer",
"page_title": "Expectations / " + str(expectation_suite_name),
"expectation_suite_name": expectation_suite_name,
"utm_medium": "expectation-suite-page",
"sections": sections,
}
)
|
def render(self, expectations):
columns, ordered_columns = self._group_and_order_expectations_by_column(
expectations
)
expectation_suite_name = expectations.expectation_suite_name
overview_content_blocks = [
self._render_expectation_suite_header(),
self._render_expectation_suite_info(expectations),
]
table_level_expectations_content_block = self._render_table_level_expectations(
columns
)
if table_level_expectations_content_block is not None:
overview_content_blocks.append(table_level_expectations_content_block)
asset_notes_content_block = self._render_expectation_suite_notes(expectations)
if asset_notes_content_block is not None:
overview_content_blocks.append(asset_notes_content_block)
sections = [
RenderedSectionContent(
**{
"section_name": "Overview",
"content_blocks": overview_content_blocks,
}
)
]
sections += [
self._column_section_renderer.render(expectations=columns[column])
for column in ordered_columns
if column != "_nocolumn"
]
return RenderedDocumentContent(
**{
"renderer_type": "ExpectationSuitePageRenderer",
"page_title": "Expectations / " + expectation_suite_name,
"expectation_suite_name": expectation_suite_name,
"utm_medium": "expectation-suite-page",
"sections": sections,
}
)
|
https://github.com/great-expectations/great_expectations/issues/1913
|
❯ great_expectations docs build
The following Data Docs sites will be built:
- local_site: file:///Users/taylor/repos/sdap/ge/great_expectations/uncommitted/data_docs/local_site/index.html
Would you like to proceed? [Y/n]:
Building Data Docs...
--- Logging error ---
Traceback (most recent call last):
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 443, in build
rendered_content = self.renderer_class.render(resource)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/page_renderer.py", line 210, in render
page_title += " / " + data_asset_name
TypeError: can only concatenate str (not "int") to str
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 1081, in emit
msg = self.format(record)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 925, in format
return fmt.format(record)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 664, in format
record.message = record.getMessage()
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 369, in getMessage
msg = msg % self.args
TypeError: not all arguments converted during string formatting
Call stack:
File "/Users/taylor/repos/sdap/.venv/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/cli.py", line 73, in main
cli()
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 45, in docs_build
build_docs(context, site_name=site_name, view=view, assume_yes=assume_yes)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 146, in build_docs
index_page_locator_infos = context.build_data_docs(site_names=site_names)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 257, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 1572, in build_data_docs
index_page_resource_identifier_tuple = site_builder.build(
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 290, in build
site_section_builder.build(resource_identifiers=resource_identifiers)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 460, in build
logger.error(exception_message, e, exc_info=True)
Message: 'An unexpected Exception occurred during data docs rendering. Because of this error, certain parts of data docs will not be rendered properly and/or may not appear altogether. Please use the trace, included in this message, to diagnose and repair the underlying issue. Detailed information follows:\n TypeError: "can only concatenate str (not "int") to str". Traceback: "Traceback (most recent call last):\n File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 443, in build\n rendered_content = self.renderer_class.render(resource)\n File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/page_renderer.py", line 210, in render\n page_title += " / " + data_asset_name\nTypeError: can only concatenate str (not "int") to str\n".'
Arguments: (TypeError('can only concatenate str (not "int") to str'),)
Traceback (most recent call last):
File "/Users/taylor/repos/sdap/.venv/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/cli.py", line 73, in main
cli()
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 45, in docs_build
build_docs(context, site_name=site_name, view=view, assume_yes=assume_yes)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 146, in build_docs
index_page_locator_infos = context.build_data_docs(site_names=site_names)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 257, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 1572, in build_data_docs
index_page_resource_identifier_tuple = site_builder.build(
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 290, in build
site_section_builder.build(resource_identifiers=resource_identifiers)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 466, in build
viewable_content,
UnboundLocalError: local variable 'viewable_content' referenced before assignment
|
TypeError
|
def render(self, validation_results):
run_id = validation_results.meta["run_id"]
if isinstance(run_id, str):
try:
run_time = parse(run_id).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
except (ValueError, TypeError):
run_time = "__none__"
run_name = run_id
elif isinstance(run_id, dict):
run_name = run_id.get("run_name") or "__none__"
run_time = run_id.get("run_time") or "__none__"
elif isinstance(run_id, RunIdentifier):
run_name = run_id.run_name or "__none__"
run_time = run_id.run_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
expectation_suite_name = validation_results.meta["expectation_suite_name"]
batch_kwargs = validation_results.meta.get("batch_kwargs")
# add datasource key to batch_kwargs if missing
if "datasource" not in validation_results.meta.get("batch_kwargs", {}):
# check if expectation_suite_name follows datasource.batch_kwargs_generator.data_asset_name.suite_name pattern
if len(expectation_suite_name.split(".")) == 4:
batch_kwargs["datasource"] = expectation_suite_name.split(".")[0]
# Group EVRs by column
# TODO: When we implement a ValidationResultSuite class, this method will move there.
columns = self._group_evrs_by_column(validation_results)
ordered_columns = Renderer._get_column_list_from_evrs(validation_results)
column_types = self._overview_section_renderer._get_column_types(validation_results)
data_asset_name = batch_kwargs.get("data_asset_name")
# Determine whether we have a custom run_name
try:
run_name_as_time = parse(run_name)
except ValueError:
run_name_as_time = None
try:
run_time_datetime = parse(run_time)
except ValueError:
run_time_datetime = None
include_run_name: bool = False
if run_name_as_time != run_time_datetime and run_name_as_time != "__none__":
include_run_name = True
page_title = "Profiling Results / " + str(expectation_suite_name)
if data_asset_name:
page_title += " / " + str(data_asset_name)
if include_run_name:
page_title += " / " + str(run_name)
page_title += " / " + str(run_time)
return RenderedDocumentContent(
**{
"renderer_type": "ProfilingResultsPageRenderer",
"page_title": page_title,
"expectation_suite_name": expectation_suite_name,
"utm_medium": "profiling-results-page",
"batch_kwargs": batch_kwargs,
"sections": [
self._overview_section_renderer.render(
validation_results, section_name="Overview"
)
]
+ [
self._column_section_renderer.render(
columns[column],
section_name=column,
column_type=column_types.get(column),
)
for column in ordered_columns
],
}
)
|
def render(self, validation_results):
run_id = validation_results.meta["run_id"]
if isinstance(run_id, str):
try:
run_time = parse(run_id).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
except (ValueError, TypeError):
run_time = "__none__"
run_name = run_id
elif isinstance(run_id, dict):
run_name = run_id.get("run_name") or "__none__"
run_time = run_id.get("run_time") or "__none__"
elif isinstance(run_id, RunIdentifier):
run_name = run_id.run_name or "__none__"
run_time = run_id.run_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
expectation_suite_name = validation_results.meta["expectation_suite_name"]
batch_kwargs = validation_results.meta.get("batch_kwargs")
# add datasource key to batch_kwargs if missing
if "datasource" not in validation_results.meta.get("batch_kwargs", {}):
# check if expectation_suite_name follows datasource.batch_kwargs_generator.data_asset_name.suite_name pattern
if len(expectation_suite_name.split(".")) == 4:
batch_kwargs["datasource"] = expectation_suite_name.split(".")[0]
# Group EVRs by column
# TODO: When we implement a ValidationResultSuite class, this method will move there.
columns = self._group_evrs_by_column(validation_results)
ordered_columns = Renderer._get_column_list_from_evrs(validation_results)
column_types = self._overview_section_renderer._get_column_types(validation_results)
data_asset_name = batch_kwargs.get("data_asset_name")
# Determine whether we have a custom run_name
try:
run_name_as_time = parse(run_name)
except ValueError:
run_name_as_time = None
try:
run_time_datetime = parse(run_time)
except ValueError:
run_time_datetime = None
include_run_name: bool = False
if run_name_as_time != run_time_datetime and run_name_as_time != "__none__":
include_run_name = True
page_title = "Profiling Results / " + expectation_suite_name
if data_asset_name:
page_title += " / " + data_asset_name
if include_run_name:
page_title += " / " + run_name
page_title += " / " + run_time
return RenderedDocumentContent(
**{
"renderer_type": "ProfilingResultsPageRenderer",
"page_title": page_title,
"expectation_suite_name": expectation_suite_name,
"utm_medium": "profiling-results-page",
"batch_kwargs": batch_kwargs,
"sections": [
self._overview_section_renderer.render(
validation_results, section_name="Overview"
)
]
+ [
self._column_section_renderer.render(
columns[column],
section_name=column,
column_type=column_types.get(column),
)
for column in ordered_columns
],
}
)
|
https://github.com/great-expectations/great_expectations/issues/1913
|
❯ great_expectations docs build
The following Data Docs sites will be built:
- local_site: file:///Users/taylor/repos/sdap/ge/great_expectations/uncommitted/data_docs/local_site/index.html
Would you like to proceed? [Y/n]:
Building Data Docs...
--- Logging error ---
Traceback (most recent call last):
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 443, in build
rendered_content = self.renderer_class.render(resource)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/page_renderer.py", line 210, in render
page_title += " / " + data_asset_name
TypeError: can only concatenate str (not "int") to str
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 1081, in emit
msg = self.format(record)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 925, in format
return fmt.format(record)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 664, in format
record.message = record.getMessage()
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 369, in getMessage
msg = msg % self.args
TypeError: not all arguments converted during string formatting
Call stack:
File "/Users/taylor/repos/sdap/.venv/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/cli.py", line 73, in main
cli()
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 45, in docs_build
build_docs(context, site_name=site_name, view=view, assume_yes=assume_yes)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 146, in build_docs
index_page_locator_infos = context.build_data_docs(site_names=site_names)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 257, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 1572, in build_data_docs
index_page_resource_identifier_tuple = site_builder.build(
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 290, in build
site_section_builder.build(resource_identifiers=resource_identifiers)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 460, in build
logger.error(exception_message, e, exc_info=True)
Message: 'An unexpected Exception occurred during data docs rendering. Because of this error, certain parts of data docs will not be rendered properly and/or may not appear altogether. Please use the trace, included in this message, to diagnose and repair the underlying issue. Detailed information follows:\n TypeError: "can only concatenate str (not "int") to str". Traceback: "Traceback (most recent call last):\n File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 443, in build\n rendered_content = self.renderer_class.render(resource)\n File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/page_renderer.py", line 210, in render\n page_title += " / " + data_asset_name\nTypeError: can only concatenate str (not "int") to str\n".'
Arguments: (TypeError('can only concatenate str (not "int") to str'),)
Traceback (most recent call last):
File "/Users/taylor/repos/sdap/.venv/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/cli.py", line 73, in main
cli()
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 45, in docs_build
build_docs(context, site_name=site_name, view=view, assume_yes=assume_yes)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 146, in build_docs
index_page_locator_infos = context.build_data_docs(site_names=site_names)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 257, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 1572, in build_data_docs
index_page_resource_identifier_tuple = site_builder.build(
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 290, in build
site_section_builder.build(resource_identifiers=resource_identifiers)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 466, in build
viewable_content,
UnboundLocalError: local variable 'viewable_content' referenced before assignment
|
TypeError
|
def build(self, resource_identifiers=None):
source_store_keys = self.source_store.list_keys()
if self.name == "validations" and self.validation_results_limit:
source_store_keys = sorted(
source_store_keys, key=lambda x: x.run_id.run_time, reverse=True
)[: self.validation_results_limit]
for resource_key in source_store_keys:
# if no resource_identifiers are passed, the section
# builder will build
# a page for every keys in its source store.
# if the caller did pass resource_identifiers, the section builder
# will build pages only for the specified resources
if resource_identifiers and resource_key not in resource_identifiers:
continue
if self.run_name_filter:
if not resource_key_passes_run_name_filter(
resource_key, self.run_name_filter
):
continue
try:
resource = self.source_store.get(resource_key)
except exceptions.InvalidKeyError:
logger.warning(
f"Object with Key: {str(resource_key)} could not be retrieved. Skipping..."
)
continue
if isinstance(resource_key, ExpectationSuiteIdentifier):
expectation_suite_name = resource_key.expectation_suite_name
logger.debug(
" Rendering expectation suite {}".format(expectation_suite_name)
)
elif isinstance(resource_key, ValidationResultIdentifier):
run_id = resource_key.run_id
run_name = run_id.run_name
run_time = run_id.run_time
expectation_suite_name = (
resource_key.expectation_suite_identifier.expectation_suite_name
)
if self.name == "profiling":
logger.debug(
" Rendering profiling for batch {}".format(
resource_key.batch_identifier
)
)
else:
logger.debug(
" Rendering validation: run name: {}, run time: {}, suite {} for batch {}".format(
run_name,
run_time,
expectation_suite_name,
resource_key.batch_identifier,
)
)
try:
rendered_content = self.renderer_class.render(resource)
viewable_content = self.view_class.render(
rendered_content,
data_context_id=self.data_context_id,
show_how_to_buttons=self.show_how_to_buttons,
)
self.target_store.set(
SiteSectionIdentifier(
site_section_name=self.name,
resource_identifier=resource_key,
),
viewable_content,
)
except Exception as e:
exception_message = f"""\
An unexpected Exception occurred during data docs rendering. Because of this error, certain parts of data docs will \
not be rendered properly and/or may not appear altogether. Please use the trace, included in this message, to \
diagnose and repair the underlying issue. Detailed information follows:
"""
exception_traceback = traceback.format_exc()
exception_message += (
f'{type(e).__name__}: "{str(e)}". Traceback: "{exception_traceback}".'
)
logger.error(exception_message, e, exc_info=True)
|
def build(self, resource_identifiers=None):
source_store_keys = self.source_store.list_keys()
if self.name == "validations" and self.validation_results_limit:
source_store_keys = sorted(
source_store_keys, key=lambda x: x.run_id.run_time, reverse=True
)[: self.validation_results_limit]
for resource_key in source_store_keys:
# if no resource_identifiers are passed, the section
# builder will build
# a page for every keys in its source store.
# if the caller did pass resource_identifiers, the section builder
# will build pages only for the specified resources
if resource_identifiers and resource_key not in resource_identifiers:
continue
if self.run_name_filter:
if not resource_key_passes_run_name_filter(
resource_key, self.run_name_filter
):
continue
try:
resource = self.source_store.get(resource_key)
except exceptions.InvalidKeyError:
logger.warning(
f"Object with Key: {str(resource_key)} could not be retrieved. Skipping..."
)
continue
if isinstance(resource_key, ExpectationSuiteIdentifier):
expectation_suite_name = resource_key.expectation_suite_name
logger.debug(
" Rendering expectation suite {}".format(expectation_suite_name)
)
elif isinstance(resource_key, ValidationResultIdentifier):
run_id = resource_key.run_id
run_name = run_id.run_name
run_time = run_id.run_time
expectation_suite_name = (
resource_key.expectation_suite_identifier.expectation_suite_name
)
if self.name == "profiling":
logger.debug(
" Rendering profiling for batch {}".format(
resource_key.batch_identifier
)
)
else:
logger.debug(
" Rendering validation: run name: {}, run time: {}, suite {} for batch {}".format(
run_name,
run_time,
expectation_suite_name,
resource_key.batch_identifier,
)
)
try:
rendered_content = self.renderer_class.render(resource)
viewable_content = self.view_class.render(
rendered_content,
data_context_id=self.data_context_id,
show_how_to_buttons=self.show_how_to_buttons,
)
except Exception as e:
exception_message = f"""\
An unexpected Exception occurred during data docs rendering. Because of this error, certain parts of data docs will \
not be rendered properly and/or may not appear altogether. Please use the trace, included in this message, to \
diagnose and repair the underlying issue. Detailed information follows:
"""
exception_traceback = traceback.format_exc()
exception_message += (
f'{type(e).__name__}: "{str(e)}". Traceback: "{exception_traceback}".'
)
logger.error(exception_message, e, exc_info=True)
self.target_store.set(
SiteSectionIdentifier(
site_section_name=self.name,
resource_identifier=resource_key,
),
viewable_content,
)
|
https://github.com/great-expectations/great_expectations/issues/1913
|
❯ great_expectations docs build
The following Data Docs sites will be built:
- local_site: file:///Users/taylor/repos/sdap/ge/great_expectations/uncommitted/data_docs/local_site/index.html
Would you like to proceed? [Y/n]:
Building Data Docs...
--- Logging error ---
Traceback (most recent call last):
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 443, in build
rendered_content = self.renderer_class.render(resource)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/page_renderer.py", line 210, in render
page_title += " / " + data_asset_name
TypeError: can only concatenate str (not "int") to str
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 1081, in emit
msg = self.format(record)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 925, in format
return fmt.format(record)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 664, in format
record.message = record.getMessage()
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/logging/__init__.py", line 369, in getMessage
msg = msg % self.args
TypeError: not all arguments converted during string formatting
Call stack:
File "/Users/taylor/repos/sdap/.venv/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/cli.py", line 73, in main
cli()
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 45, in docs_build
build_docs(context, site_name=site_name, view=view, assume_yes=assume_yes)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 146, in build_docs
index_page_locator_infos = context.build_data_docs(site_names=site_names)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 257, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 1572, in build_data_docs
index_page_resource_identifier_tuple = site_builder.build(
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 290, in build
site_section_builder.build(resource_identifiers=resource_identifiers)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 460, in build
logger.error(exception_message, e, exc_info=True)
Message: 'An unexpected Exception occurred during data docs rendering. Because of this error, certain parts of data docs will not be rendered properly and/or may not appear altogether. Please use the trace, included in this message, to diagnose and repair the underlying issue. Detailed information follows:\n TypeError: "can only concatenate str (not "int") to str". Traceback: "Traceback (most recent call last):\n File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 443, in build\n rendered_content = self.renderer_class.render(resource)\n File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/page_renderer.py", line 210, in render\n page_title += " / " + data_asset_name\nTypeError: can only concatenate str (not "int") to str\n".'
Arguments: (TypeError('can only concatenate str (not "int") to str'),)
Traceback (most recent call last):
File "/Users/taylor/repos/sdap/.venv/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/cli.py", line 73, in main
cli()
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 45, in docs_build
build_docs(context, site_name=site_name, view=view, assume_yes=assume_yes)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/cli/docs.py", line 146, in build_docs
index_page_locator_infos = context.build_data_docs(site_names=site_names)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 257, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/data_context/data_context.py", line 1572, in build_data_docs
index_page_resource_identifier_tuple = site_builder.build(
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 290, in build
site_section_builder.build(resource_identifiers=resource_identifiers)
File "/Users/taylor/repos/sdap/.venv/lib/python3.8/site-packages/great_expectations/render/renderer/site_builder.py", line 466, in build
viewable_content,
UnboundLocalError: local variable 'viewable_content' referenced before assignment
|
TypeError
|
def __init__(
self,
expectation_suite_name,
expectations=None,
evaluation_parameters=None,
data_asset_type=None,
meta=None,
):
self.expectation_suite_name = expectation_suite_name
if expectations is None:
expectations = []
self.expectations = [
ExpectationConfiguration(**expectation)
if isinstance(expectation, dict)
else expectation
for expectation in expectations
]
if evaluation_parameters is None:
evaluation_parameters = {}
self.evaluation_parameters = evaluation_parameters
self.data_asset_type = data_asset_type
if meta is None:
meta = {"great_expectations.__version__": ge_version}
if not "great_expectations.__version__" in meta.keys():
meta["great_expectations.__version__"] = ge_version
# We require meta information to be serializable, but do not convert until necessary
ensure_json_serializable(meta)
self.meta = meta
|
def __init__(
self,
expectation_suite_name,
expectations=None,
evaluation_parameters=None,
data_asset_type=None,
meta=None,
):
self.expectation_suite_name = expectation_suite_name
if expectations is None:
expectations = []
self.expectations = [
ExpectationConfiguration(**expectation)
if isinstance(expectation, dict)
else expectation
for expectation in expectations
]
if evaluation_parameters is None:
evaluation_parameters = {}
self.evaluation_parameters = evaluation_parameters
self.data_asset_type = data_asset_type
if meta is None:
meta = {"great_expectations.__version__": ge_version}
# We require meta information to be serializable, but do not convert until necessary
ensure_json_serializable(meta)
self.meta = meta
|
https://github.com/great-expectations/great_expectations/issues/1637
|
--- Logging error ---
Traceback (most recent call last):
File "/Users/taylor/repos/forks/great_expectations/great_expectations/render/renderer/site_builder.py", line 427, in build
rendered_content = self.renderer_class.render(resource)
File "/Users/taylor/repos/forks/great_expectations/great_expectations/render/renderer/page_renderer.py", line 457, in render
self._render_expectation_suite_info(expectations),
File "/Users/taylor/repos/forks/great_expectations/great_expectations/render/renderer/page_renderer.py", line 539, in _render_expectation_suite_info
ge_version = expectations.meta["great_expectations.__version__"]
KeyError: 'great_expectations.__version__'
|
KeyError
|
def _set(self, key, value, content_encoding="utf-8", content_type="application/json"):
gcs_object_key = os.path.join(self.prefix, self._convert_key_to_filepath(key))
from google.cloud import storage
gcs = storage.Client(project=self.project)
bucket = gcs.get_bucket(self.bucket)
blob = bucket.blob(gcs_object_key)
if isinstance(value, str):
blob.upload_from_string(
value.encode(content_encoding), content_type=content_type
)
else:
blob.upload_from_string(value, content_type=content_type)
return gcs_object_key
|
def _set(self, key, value, content_encoding="utf-8", content_type="application/json"):
gcs_object_key = os.path.join(self.prefix, self._convert_key_to_filepath(key))
from google.cloud import storage
gcs = storage.Client(project=self.project)
bucket = gcs.get_bucket(self.bucket)
blob = bucket.blob(gcs_object_key)
if isinstance(value, str):
blob.upload_from_string(
value.encode(content_encoding),
content_encoding=content_encoding,
content_type=content_type,
)
else:
blob.upload_from_string(value, content_type=content_type)
return gcs_object_key
|
https://github.com/great-expectations/great_expectations/issues/1393
|
$ great_expectations docs build
Building Data Docs...
Traceback (most recent call last):
File "/Users/taylor/repos/great_expectations/.venv/bin/great_expectations", line 11, in <module>
load_entry_point('great-expectations', 'console_scripts', 'great_expectations')()
File "/Users/taylor/repos/great_expectations/great_expectations/cli/cli.py", line 67, in main
cli()
File "/Users/taylor/repos/great_expectations/.venv/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/taylor/repos/great_expectations/.venv/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/taylor/repos/great_expectations/.venv/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/great_expectations/.venv/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/taylor/repos/great_expectations/.venv/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/taylor/repos/great_expectations/.venv/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/taylor/repos/great_expectations/great_expectations/cli/docs.py", line 40, in docs_build
build_docs(context, site_name=site_name, view=view)
File "/Users/taylor/repos/great_expectations/great_expectations/cli/docs.py", line 132, in build_docs
index_page_locator_infos = context.build_data_docs(site_names=site_names)
File "/Users/taylor/repos/great_expectations/great_expectations/core/usage_statistics/usage_statistics.py", line 217, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/Users/taylor/repos/great_expectations/great_expectations/data_context/data_context.py", line 1255, in build_data_docs
index_page_resource_identifier_tuple = site_builder.build(resource_identifiers)
File "/Users/taylor/repos/great_expectations/great_expectations/render/renderer/site_builder.py", line 237, in build
site_section_builder.build(resource_identifiers=resource_identifiers)
File "/Users/taylor/repos/great_expectations/great_expectations/render/renderer/site_builder.py", line 389, in build
viewable_content
File "/Users/taylor/repos/great_expectations/great_expectations/data_context/store/html_site_store.py", line 144, in set
content_encoding='utf-8', content_type='text/html; charset=utf-8')
File "/Users/taylor/repos/great_expectations/great_expectations/data_context/store/store_backend.py", line 32, in set
return self._set(key, value, **kwargs)
File "/Users/taylor/repos/great_expectations/great_expectations/data_context/store/tuple_store_backend.py", line 473, in _set
content_type=content_type)
TypeError: upload_from_string() got an unexpected keyword argument 'content_encoding'
|
TypeError
|
def build_docs(context, site_name=None, view=True):
"""Build documentation in a context"""
logger.debug("Starting cli.datasource.build_docs")
cli_message("Building Data Docs...")
if site_name is not None:
site_names = [site_name]
else:
site_names = None
index_page_locator_infos = context.build_data_docs(site_names=site_names)
msg = "The following Data Docs sites were built:\n"
for site_name, index_page_locator_info in index_page_locator_infos.items():
if os.path.isfile(index_page_locator_info):
msg += " - <cyan>{}:</cyan> ".format(site_name)
msg += "file://{}\n".format(index_page_locator_info)
else:
msg += " - <cyan>{}:</cyan> ".format(site_name)
msg += "{}\n".format(index_page_locator_info)
msg = msg.rstrip("\n")
cli_message(msg)
if view:
context.open_data_docs(site_name=site_name)
|
def build_docs(context, site_name=None, view=True):
"""Build documentation in a context"""
logger.debug("Starting cli.datasource.build_docs")
cli_message("Building Data Docs...")
if site_name is not None:
site_names = [site_name]
else:
site_names = None
index_page_locator_infos = context.build_data_docs(site_names=site_names)
msg = "The following Data Docs sites were built:\n"
for site_name, index_page_locator_info in index_page_locator_infos.items():
if os.path.isfile(index_page_locator_info):
msg += " - <cyan>{}:</cyan> ".format(site_name)
msg += "file://{}\n".format(index_page_locator_info)
else:
msg += " - <cyan>{}:</cyan> ".format(site_name)
msg += "{}\n".format(index_page_locator_info)
msg = msg.rstrip("\n")
cli_message(msg)
if view:
context.open_data_docs()
|
https://github.com/great-expectations/great_expectations/issues/1378
|
Running great_expectations docs build --site-name local_site
Building Data Docs...
The following Data Docs sites were built:
- local_site: file:///great_expectations/uncommitted/data_docs/local_site/index.html
Traceback (most recent call last):
File "/usr/local/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/usr/local/lib/python3.7/site-packages/great_expectations/cli/cli.py", line 67, in main
cli()
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.7/site-packages/great_expectations/cli/docs.py", line 40, in docs_build
build_docs(context, site_name=site_name, view=view)
File "/usr/local/lib/python3.7/site-packages/great_expectations/cli/docs.py", line 147, in build_docs
context.open_data_docs()
File "/usr/local/lib/python3.7/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 215, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/usr/local/lib/python3.7/site-packages/great_expectations/data_context/data_context.py", line 428, in open_data_docs
data_docs_urls = self.get_docs_sites_urls(resource_identifier=resource_identifier)
File "/usr/local/lib/python3.7/site-packages/great_expectations/data_context/data_context.py", line 411, in get_docs_sites_urls
url = site_builder.get_resource_url(resource_identifier=resource_identifier)
File "/usr/local/lib/python3.7/site-packages/great_expectations/render/renderer/site_builder.py", line 253, in get_resource_url
return self.target_store.get_url_for_resource(resource_identifier=resource_identifier)
File "/usr/local/lib/python3.7/site-packages/great_expectations/data_context/store/html_site_store.py", line 168, in get_url_for_resource
return store_backend.get_url_for_key(key)
File "/usr/local/lib/python3.7/site-packages/great_expectations/data_context/store/tuple_store_backend.py", line 380, in get_url_for_key
location = boto3.client('s3').get_bucket_location(Bucket=self.bucket)['LocationConstraint']
File "/usr/local/lib/python3.7/site-packages/botocore/client.py", line 357, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/usr/local/lib/python3.7/site-packages/botocore/client.py", line 661, in _make_api_call
raise error_class(parsed_response, operation_name)
botocore.exceptions.ClientError: An error occurred (InvalidAccessKeyId) when calling the GetBucketLocation operation: The AWS Access Key Id you provided does not exist in our records.
|
botocore.exceptions.ClientError
|
def get_docs_sites_urls(
self, resource_identifier=None, site_name: Optional[str] = None
) -> List[Dict[str, str]]:
"""
Get URLs for a resource for all data docs sites.
This function will return URLs for any configured site even if the sites
have not been built yet.
Args:
resource_identifier (object): optional. It can be an identifier of
ExpectationSuite's, ValidationResults and other resources that
have typed identifiers. If not provided, the method will return
the URLs of the index page.
site_name: Optionally specify which site to open. If not specified,
return all urls in the project.
Returns:
list: a list of URLs. Each item is the URL for the resource for a
data docs site
"""
sites = self._project_config_with_variables_substituted.data_docs_sites
if not sites:
logger.debug("Found no data_docs_sites.")
return []
logger.debug(f"Found {len(sites)} data_docs_sites.")
if site_name:
if site_name not in sites.keys():
raise ge_exceptions.DataContextError(
f"Could not find site named {site_name}. Please check your configurations"
)
site = sites[site_name]
site_builder = self._load_site_builder_from_site_config(site)
url = site_builder.get_resource_url(resource_identifier=resource_identifier)
return [{"site_name": site_name, "site_url": url}]
site_urls = []
for _site_name, site_config in sites.items():
site_builder = self._load_site_builder_from_site_config(site_config)
url = site_builder.get_resource_url(resource_identifier=resource_identifier)
site_urls.append({"site_name": _site_name, "site_url": url})
return site_urls
|
def get_docs_sites_urls(self, resource_identifier=None):
"""
Get URLs for a resource for all data docs sites.
This function will return URLs for any configured site even if the sites have not
been built yet.
:param resource_identifier: optional. It can be an identifier of ExpectationSuite's,
ValidationResults and other resources that have typed identifiers.
If not provided, the method will return the URLs of the index page.
:return: a list of URLs. Each item is the URL for the resource for a data docs site
"""
site_urls = []
site_names = None
sites = self._project_config_with_variables_substituted.data_docs_sites
if sites:
logger.debug("Found data_docs_sites.")
for site_name, site_config in sites.items():
if (site_names and site_name in site_names) or not site_names:
complete_site_config = site_config
module_name = "great_expectations.render.renderer.site_builder"
site_builder = instantiate_class_from_config(
config=complete_site_config,
runtime_environment={
"data_context": self,
"root_directory": self.root_directory,
},
config_defaults={"module_name": module_name},
)
if not site_builder:
raise ge_exceptions.ClassInstantiationError(
module_name=module_name,
package_name=None,
class_name=complete_site_config["class_name"],
)
url = site_builder.get_resource_url(
resource_identifier=resource_identifier
)
site_urls.append({"site_name": site_name, "site_url": url})
return site_urls
|
https://github.com/great-expectations/great_expectations/issues/1378
|
Running great_expectations docs build --site-name local_site
Building Data Docs...
The following Data Docs sites were built:
- local_site: file:///great_expectations/uncommitted/data_docs/local_site/index.html
Traceback (most recent call last):
File "/usr/local/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/usr/local/lib/python3.7/site-packages/great_expectations/cli/cli.py", line 67, in main
cli()
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.7/site-packages/great_expectations/cli/docs.py", line 40, in docs_build
build_docs(context, site_name=site_name, view=view)
File "/usr/local/lib/python3.7/site-packages/great_expectations/cli/docs.py", line 147, in build_docs
context.open_data_docs()
File "/usr/local/lib/python3.7/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 215, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/usr/local/lib/python3.7/site-packages/great_expectations/data_context/data_context.py", line 428, in open_data_docs
data_docs_urls = self.get_docs_sites_urls(resource_identifier=resource_identifier)
File "/usr/local/lib/python3.7/site-packages/great_expectations/data_context/data_context.py", line 411, in get_docs_sites_urls
url = site_builder.get_resource_url(resource_identifier=resource_identifier)
File "/usr/local/lib/python3.7/site-packages/great_expectations/render/renderer/site_builder.py", line 253, in get_resource_url
return self.target_store.get_url_for_resource(resource_identifier=resource_identifier)
File "/usr/local/lib/python3.7/site-packages/great_expectations/data_context/store/html_site_store.py", line 168, in get_url_for_resource
return store_backend.get_url_for_key(key)
File "/usr/local/lib/python3.7/site-packages/great_expectations/data_context/store/tuple_store_backend.py", line 380, in get_url_for_key
location = boto3.client('s3').get_bucket_location(Bucket=self.bucket)['LocationConstraint']
File "/usr/local/lib/python3.7/site-packages/botocore/client.py", line 357, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/usr/local/lib/python3.7/site-packages/botocore/client.py", line 661, in _make_api_call
raise error_class(parsed_response, operation_name)
botocore.exceptions.ClientError: An error occurred (InvalidAccessKeyId) when calling the GetBucketLocation operation: The AWS Access Key Id you provided does not exist in our records.
|
botocore.exceptions.ClientError
|
def open_data_docs(
self, resource_identifier: Optional[str] = None, site_name: Optional[str] = None
) -> None:
"""
A stdlib cross-platform way to open a file in a browser.
Args:
resource_identifier: ExpectationSuiteIdentifier,
ValidationResultIdentifier or any other type's identifier. The
argument is optional - when not supplied, the method returns the
URL of the index page.
site_name: Optionally specify which site to open. If not specified,
open all docs found in the project.
"""
data_docs_urls = self.get_docs_sites_urls(
resource_identifier=resource_identifier,
site_name=site_name,
)
urls_to_open = [site["site_url"] for site in data_docs_urls]
for url in urls_to_open:
logger.debug(f"Opening Data Docs found here: {url}")
webbrowser.open(url)
|
def open_data_docs(self, resource_identifier=None):
"""
A stdlib cross-platform way to open a file in a browser.
:param resource_identifier: ExpectationSuiteIdentifier, ValidationResultIdentifier
or any other type's identifier. The argument is optional - when
not supplied, the method returns the URL of the index page.
"""
data_docs_urls = self.get_docs_sites_urls(resource_identifier=resource_identifier)
for site_dict in data_docs_urls:
logger.debug("Opening Data Docs found here: {}".format(site_dict["site_url"]))
webbrowser.open(site_dict["site_url"])
|
https://github.com/great-expectations/great_expectations/issues/1378
|
Running great_expectations docs build --site-name local_site
Building Data Docs...
The following Data Docs sites were built:
- local_site: file:///great_expectations/uncommitted/data_docs/local_site/index.html
Traceback (most recent call last):
File "/usr/local/bin/great_expectations", line 8, in <module>
sys.exit(main())
File "/usr/local/lib/python3.7/site-packages/great_expectations/cli/cli.py", line 67, in main
cli()
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.7/site-packages/great_expectations/cli/docs.py", line 40, in docs_build
build_docs(context, site_name=site_name, view=view)
File "/usr/local/lib/python3.7/site-packages/great_expectations/cli/docs.py", line 147, in build_docs
context.open_data_docs()
File "/usr/local/lib/python3.7/site-packages/great_expectations/core/usage_statistics/usage_statistics.py", line 215, in usage_statistics_wrapped_method
result = func(*args, **kwargs)
File "/usr/local/lib/python3.7/site-packages/great_expectations/data_context/data_context.py", line 428, in open_data_docs
data_docs_urls = self.get_docs_sites_urls(resource_identifier=resource_identifier)
File "/usr/local/lib/python3.7/site-packages/great_expectations/data_context/data_context.py", line 411, in get_docs_sites_urls
url = site_builder.get_resource_url(resource_identifier=resource_identifier)
File "/usr/local/lib/python3.7/site-packages/great_expectations/render/renderer/site_builder.py", line 253, in get_resource_url
return self.target_store.get_url_for_resource(resource_identifier=resource_identifier)
File "/usr/local/lib/python3.7/site-packages/great_expectations/data_context/store/html_site_store.py", line 168, in get_url_for_resource
return store_backend.get_url_for_key(key)
File "/usr/local/lib/python3.7/site-packages/great_expectations/data_context/store/tuple_store_backend.py", line 380, in get_url_for_key
location = boto3.client('s3').get_bucket_location(Bucket=self.bucket)['LocationConstraint']
File "/usr/local/lib/python3.7/site-packages/botocore/client.py", line 357, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/usr/local/lib/python3.7/site-packages/botocore/client.py", line 661, in _make_api_call
raise error_class(parsed_response, operation_name)
botocore.exceptions.ClientError: An error occurred (InvalidAccessKeyId) when calling the GetBucketLocation operation: The AWS Access Key Id you provided does not exist in our records.
|
botocore.exceptions.ClientError
|
def setConfig(self, button):
try:
if os.path.exists("/opt/sublime_text/sublime_text"):
Popen(
[
"/opt/sublime_text/sublime_text",
os.environ["HOME"] + "/.config/kinto/kinto.py",
]
)
elif which("gedit") is not None:
Popen(["gedit", os.environ["HOME"] + "/.config/kinto/kinto.py"])
elif which("mousepad") is not None:
Popen(["mousepad", os.environ["HOME"] + "/.config/kinto/kinto.py"])
elif which("kate") is not None:
Popen(["kate", os.environ["HOME"] + "/.config/kinto/kinto.py"])
elif which("kwrite") is not None:
Popen(["kwrite", os.environ["HOME"] + "/.config/kinto/kinto.py"])
except CalledProcessError: # Notify user about error on running restart commands.
Popen(["notify-send", "Kinto: Error could not open config file!"])
|
def setConfig(self, button):
try:
if os.path.exists("/opt/sublime_text/sublime_text"):
Popen(
[
"/opt/sublime_text/sublime_text",
os.environ["HOME"] + "/.config/kinto/kinto.py",
]
)
elif which(gedit) is not None:
Popen(["gedit", os.environ["HOME"] + "/.config/kinto/kinto.py"])
elif which(mousepad) is not None:
Popen(["mousepad", os.environ["HOME"] + "/.config/kinto/kinto.py"])
elif which(kate) is not None:
Popen(["kate", os.environ["HOME"] + "/.config/kinto/kinto.py"])
elif which(kwrite) is not None:
Popen(["kwrite", os.environ["HOME"] + "/.config/kinto/kinto.py"])
except CalledProcessError: # Notify user about error on running restart commands.
Popen(["notify-send", "Kinto: Error could not open config file!"])
|
https://github.com/rbreaves/kinto/issues/317
|
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 623, in setConfig
elif which(gedit) is not None:
NameError: name 'gedit' is not defined
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 635, in setService
elif which(gedit) is not None:
NameError: name 'gedit' is not defined
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 694, in setSysKB
Popen(['gnome-control-center','keyboard'])
File "/usr/lib/python3.8/subprocess.py", line 854, in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
File "/usr/lib/python3.8/subprocess.py", line 1702, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] 그런 파일이나 디렉터리가 없습니다: 'gnome-control-center'
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 700, in setRegion
Popen(['gnome-control-center','region'])
File "/usr/lib/python3.8/subprocess.py", line 854, in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
File "/usr/lib/python3.8/subprocess.py", line 1702, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] 그런 파일이나 디렉터리가 없습니다: 'gnome-control-center'
|
NameError
|
def setService(self, button):
try:
if os.path.exists("/opt/sublime_text/sublime_text"):
Popen(
[
"/opt/sublime_text/sublime_text",
"/lib/systemd/system/xkeysnail.service",
]
)
elif which("gedit") is not None:
Popen(["gedit", "/lib/systemd/system/xkeysnail.service"])
elif which("mousepad") is not None:
Popen(["mousepad", "/lib/systemd/system/xkeysnail.service"])
elif which("kate") is not None:
Popen(["kate", "/lib/systemd/system/xkeysnail.service"])
elif which("kwrite") is not None:
Popen(["kwrite", "/lib/systemd/system/xkeysnail.service"])
except CalledProcessError: # Notify user about error on running restart commands.
Popen(["notify-send", "Kinto: Error could not open config file!"])
|
def setService(self, button):
try:
if os.path.exists("/opt/sublime_text/sublime_text"):
Popen(
[
"/opt/sublime_text/sublime_text",
"/lib/systemd/system/xkeysnail.service",
]
)
elif which(gedit) is not None:
Popen(["gedit", "/lib/systemd/system/xkeysnail.service"])
elif which(mousepad) is not None:
Popen(["mousepad", "/lib/systemd/system/xkeysnail.service"])
elif which(kate) is not None:
Popen(["kate", "/lib/systemd/system/xkeysnail.service"])
elif which(kwrite) is not None:
Popen(["kwrite", "/lib/systemd/system/xkeysnail.service"])
except CalledProcessError: # Notify user about error on running restart commands.
Popen(["notify-send", "Kinto: Error could not open config file!"])
|
https://github.com/rbreaves/kinto/issues/317
|
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 623, in setConfig
elif which(gedit) is not None:
NameError: name 'gedit' is not defined
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 635, in setService
elif which(gedit) is not None:
NameError: name 'gedit' is not defined
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 694, in setSysKB
Popen(['gnome-control-center','keyboard'])
File "/usr/lib/python3.8/subprocess.py", line 854, in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
File "/usr/lib/python3.8/subprocess.py", line 1702, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] 그런 파일이나 디렉터리가 없습니다: 'gnome-control-center'
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 700, in setRegion
Popen(['gnome-control-center','region'])
File "/usr/lib/python3.8/subprocess.py", line 854, in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
File "/usr/lib/python3.8/subprocess.py", line 1702, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] 그런 파일이나 디렉터리가 없습니다: 'gnome-control-center'
|
NameError
|
def setSysKB(self, button):
if self.ostype == "XFCE":
Popen(["xfce4-keyboard-settings"])
elif self.ostype == "KDE":
self.queryConfig(
"systemsettings >/dev/null 2>&1 || systemsettings5 >/dev/null 2>&1"
)
else:
Popen(["gnome-control-center", "keyboard"])
|
def setSysKB(self, button):
if self.ostype == "XFCE":
Popen(["xfce4-keyboard-settings"])
else:
Popen(["gnome-control-center", "keyboard"])
|
https://github.com/rbreaves/kinto/issues/317
|
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 623, in setConfig
elif which(gedit) is not None:
NameError: name 'gedit' is not defined
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 635, in setService
elif which(gedit) is not None:
NameError: name 'gedit' is not defined
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 694, in setSysKB
Popen(['gnome-control-center','keyboard'])
File "/usr/lib/python3.8/subprocess.py", line 854, in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
File "/usr/lib/python3.8/subprocess.py", line 1702, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] 그런 파일이나 디렉터리가 없습니다: 'gnome-control-center'
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 700, in setRegion
Popen(['gnome-control-center','region'])
File "/usr/lib/python3.8/subprocess.py", line 854, in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
File "/usr/lib/python3.8/subprocess.py", line 1702, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] 그런 파일이나 디렉터리가 없습니다: 'gnome-control-center'
|
NameError
|
def setRegion(self, button):
if self.ostype == "XFCE":
Popen(["gnome-language-selector"])
elif self.ostype == "KDE":
self.queryConfig(
"kcmshell4 kcm_translations >/dev/null 2>&1 || kcmshell5 kcm_translations >/dev/null 2>&1"
)
else:
Popen(["gnome-control-center", "region"])
|
def setRegion(self, button):
if self.ostype == "XFCE":
Popen(["gnome-language-selector"])
else:
Popen(["gnome-control-center", "region"])
|
https://github.com/rbreaves/kinto/issues/317
|
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 623, in setConfig
elif which(gedit) is not None:
NameError: name 'gedit' is not defined
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 635, in setService
elif which(gedit) is not None:
NameError: name 'gedit' is not defined
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 694, in setSysKB
Popen(['gnome-control-center','keyboard'])
File "/usr/lib/python3.8/subprocess.py", line 854, in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
File "/usr/lib/python3.8/subprocess.py", line 1702, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] 그런 파일이나 디렉터리가 없습니다: 'gnome-control-center'
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 700, in setRegion
Popen(['gnome-control-center','region'])
File "/usr/lib/python3.8/subprocess.py", line 854, in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
File "/usr/lib/python3.8/subprocess.py", line 1702, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] 그런 파일이나 디렉터리가 없습니다: 'gnome-control-center'
|
NameError
|
def setConfig(self, button):
try:
if os.path.exists("/opt/sublime_text/sublime_text"):
Popen(
[
"/opt/sublime_text/sublime_text",
os.environ["HOME"] + "/.config/kinto/kinto.py",
]
)
elif which("gedit") is not None:
Popen(["gedit", os.environ["HOME"] + "/.config/kinto/kinto.py"])
elif which("mousepad") is not None:
Popen(["mousepad", os.environ["HOME"] + "/.config/kinto/kinto.py"])
elif which("kate") is not None:
Popen(["kate", os.environ["HOME"] + "/.config/kinto/kinto.py"])
elif which("kwrite") is not None:
Popen(["kwrite", os.environ["HOME"] + "/.config/kinto/kinto.py"])
except CalledProcessError: # Notify user about error on running restart commands.
Popen(["notify-send", "Kinto: Error could not open config file!"])
|
def setConfig(self, button):
try:
if os.path.exists("/opt/sublime_text/sublime_text"):
Popen(
[
"/opt/sublime_text/sublime_text",
os.environ["HOME"] + "/.config/kinto/kinto.py",
]
)
elif which(gedit) is not None:
Popen(["gedit", os.environ["HOME"] + "/.config/kinto/kinto.py"])
elif which(mousepad) is not None:
Popen(["mousepad", os.environ["HOME"] + "/.config/kinto/kinto.py"])
except CalledProcessError: # Notify user about error on running restart commands.
Popen(["notify-send", "Kinto: Error could not open config file!"])
|
https://github.com/rbreaves/kinto/issues/317
|
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 623, in setConfig
elif which(gedit) is not None:
NameError: name 'gedit' is not defined
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 635, in setService
elif which(gedit) is not None:
NameError: name 'gedit' is not defined
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 694, in setSysKB
Popen(['gnome-control-center','keyboard'])
File "/usr/lib/python3.8/subprocess.py", line 854, in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
File "/usr/lib/python3.8/subprocess.py", line 1702, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] 그런 파일이나 디렉터리가 없습니다: 'gnome-control-center'
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 700, in setRegion
Popen(['gnome-control-center','region'])
File "/usr/lib/python3.8/subprocess.py", line 854, in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
File "/usr/lib/python3.8/subprocess.py", line 1702, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] 그런 파일이나 디렉터리가 없습니다: 'gnome-control-center'
|
NameError
|
def setService(self, button):
try:
if os.path.exists("/opt/sublime_text/sublime_text"):
Popen(
[
"/opt/sublime_text/sublime_text",
"/lib/systemd/system/xkeysnail.service",
]
)
elif which("gedit") is not None:
Popen(["gedit", "/lib/systemd/system/xkeysnail.service"])
elif which("mousepad") is not None:
Popen(["mousepad", "/lib/systemd/system/xkeysnail.service"])
elif which("kate") is not None:
Popen(["kate", "/lib/systemd/system/xkeysnail.service"])
elif which("kwrite") is not None:
Popen(["kwrite", "/lib/systemd/system/xkeysnail.service"])
except CalledProcessError: # Notify user about error on running restart commands.
Popen(["notify-send", "Kinto: Error could not open config file!"])
|
def setService(self, button):
try:
if os.path.exists("/opt/sublime_text/sublime_text"):
Popen(
[
"/opt/sublime_text/sublime_text",
"/lib/systemd/system/xkeysnail.service",
]
)
elif which(gedit) is not None:
Popen(["gedit", "/lib/systemd/system/xkeysnail.service"])
elif which(mousepad) is not None:
Popen(["mousepad", "/lib/systemd/system/xkeysnail.service"])
except CalledProcessError: # Notify user about error on running restart commands.
Popen(["notify-send", "Kinto: Error could not open config file!"])
|
https://github.com/rbreaves/kinto/issues/317
|
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 623, in setConfig
elif which(gedit) is not None:
NameError: name 'gedit' is not defined
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 635, in setService
elif which(gedit) is not None:
NameError: name 'gedit' is not defined
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 694, in setSysKB
Popen(['gnome-control-center','keyboard'])
File "/usr/lib/python3.8/subprocess.py", line 854, in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
File "/usr/lib/python3.8/subprocess.py", line 1702, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] 그런 파일이나 디렉터리가 없습니다: 'gnome-control-center'
Traceback (most recent call last):
File "/home/nemokden/.config/kinto/kintotray.py", line 700, in setRegion
Popen(['gnome-control-center','region'])
File "/usr/lib/python3.8/subprocess.py", line 854, in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
File "/usr/lib/python3.8/subprocess.py", line 1702, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] 그런 파일이나 디렉터리가 없습니다: 'gnome-control-center'
|
NameError
|
def keyboard_detect():
global internalid, usbid, chromeswap, system_type
internal_kbname = ""
usb_kbname = ""
# If chromebook
if system_type == "2":
print()
print("Looking for keyboards...")
print()
result = subprocess.check_output(
'xinput list | grep -iv "Virtual\|USB" | grep -i "keyboard.*keyboard" | grep -o -P "(?<=↳).*(?=id\=)";exit 0',
shell=True,
).decode("utf-8")
if result != "":
internal_kbname = result.strip()
internalid = subprocess.check_output(
'xinput list | grep -iv "Virtual\|USB" | grep -i "keyboard.*keyboard" | cut -d "=" -f 2- | awk \'{print $1}\' | tail -1;exit 0',
shell=True,
).decode("utf-8")
print("Internal Keyboard\nName: " + internal_kbname + "\nID: " + internalid)
result = subprocess.check_output(
'udevadm info -e | grep -o -P "(?<=by-id/usb-).*(?=-event-kbd)" | head -1;exit 0',
shell=True,
).decode("utf-8")
if result != "":
usb_kbname = result.strip()
# Loop the following to ensure the id is picked up after 5-10 tries
usbid = ""
usbcount = 0
while usbid == "":
usbid = subprocess.check_output(
'udevadm info -e | stdbuf -oL grep -o -P "(?<=event-kbd /dev/input/by-path/pci-0000:00:).*(?=.0-usb) | head -n 1";exit 0',
shell=True,
).decode("utf-8")
if usbid == "":
usbcount += 1
# print('usbid not found '+ str(usbcount))
if usbcount == 5:
usbid = "0"
time.sleep(1)
print("\nUSB Keyboard\n" + "Name: " + usb_kbname + "\nID: " + usbid)
if system_type == "1":
system_type = "windows"
elif system_type == "2":
system_type = "chromebook"
elif system_type == "3":
system_type = "mac"
if system_type == "windows" or system_type == "mac":
subprocess.check_output("/bin/bash -c ./mac_wordwise.sh", shell=True).decode(
"utf-8"
)
cmdgui = '"/usr/bin/setxkbmap -option;xkbcomp -w0 -I$HOME/.xkb ~/.xkb/keymap/kbd.mac.onelvl $DISPLAY"'
# subprocess.check_output('echo "1" > /sys/module/hid_apple/parameters/swap_opt_cmd', shell=True).decode('utf-8')
elif system_type == "chromebook":
subprocess.check_output("/bin/bash -c ./chromebook.sh", shell=True).decode(
"utf-8"
)
cmdgui = '"setxkbmap -option;xkbcomp -w0 -I$HOME/.xkb ~/.xkb/keymap/kbd.chromebook.gui $DISPLAY"'
# password = getpass("Please enter your password to complete the keyswap: ")
# proc = Popen("echo '1' | sudo tee -a /sys/module/hid_apple/parameters/swap_opt_cmd".split(), stdin=PIPE, stdout=PIPE, stderr=PIPE)
# proc.communicate(password.encode())
if swap_behavior == 1:
print("Setting up " + system_type + " keyswap as a service.")
print("You can disable and remove the service by using the following commands.")
print("systemctl --user stop keyswap")
print("systemctl --user disable keyswap")
print("rm -rf ~/.config/autostart/keyswap.sh")
print("rm -rf ~/.config/xactive.sh")
keyswapcmd = (
'/bin/bash -c "./keyswap_service.sh 1 0 '
+ system_type
+ " "
+ str(internalid).strip()
+ " "
+ str(usbid).strip()
+ " "
+ str(chromeswap)
+ '"'
)
print(keyswapcmd)
subprocess.check_output(keyswapcmd, shell=True).decode("utf-8")
else:
print(
"Setting up "
+ system_type
+ " keyswap inside your profiles ~/.Xsession file."
)
print(
"You can modify or remove the file if you want you want to remove the modification."
)
keyswapcmd = '/bin/bash -c "./keyswap_service.sh 0 ' + cmdgui + '"'
subprocess.check_output(keyswapcmd, shell=True).decode("utf-8")
print(
"Please run this command in the terminal if you are using a Windows or Macbook."
)
print("Your keymapping will not work right on Apple keyboards without it.")
print("echo '1' | sudo tee -a /sys/module/hid_apple/parameters/swap_opt_cmd")
|
def keyboard_detect():
global internalid, usbid, chromeswap, system_type
internal_kbname = ""
usb_kbname = ""
print()
print("Looking for keyboards...")
print()
result = subprocess.check_output(
'xinput list | grep -iv "Virtual\|USB" | grep -i "keyboard.*keyboard" | grep -o -P "(?<=↳).*(?=id\=)";exit 0',
shell=True,
).decode("utf-8")
if result != "":
internal_kbname = result.strip()
internalid = subprocess.check_output(
'xinput list | grep -iv "Virtual\|USB" | grep -i "keyboard.*keyboard" | cut -d "=" -f 2- | awk \'{print $1}\' | tail -1;exit 0',
shell=True,
).decode("utf-8")
print("Internal Keyboard\nName: " + internal_kbname + "\nID: " + internalid)
result = subprocess.check_output(
'udevadm info -e | grep -o -P "(?<=by-id/usb-).*(?=-event-kbd)" | head -1;exit 0',
shell=True,
).decode("utf-8")
if result != "":
usb_kbname = result.strip()
# Loop the following to ensure the id is picked up after 5-10 tries
usbid = ""
usbcount = 0
while usbid == "":
usbid = subprocess.check_output(
'udevadm info -e | stdbuf -oL grep -o -P "(?<=event-kbd /dev/input/by-path/pci-0000:00:).*(?=.0-usb)";exit 0',
shell=True,
).decode("utf-8")
if usbid == "":
usbcount += 1
# print('usbid not found '+ str(usbcount))
if usbcount == 5:
usbid = "0"
time.sleep(1)
print("\nUSB Keyboard\n" + "Name: " + usb_kbname + "\nID: " + usbid)
if system_type == "1":
system_type = "windows"
elif system_type == "2":
system_type = "chromebook"
elif system_type == "3":
system_type = "mac"
if system_type == "windows" or system_type == "mac":
subprocess.check_output("/bin/bash -c ./mac_wordwise.sh", shell=True).decode(
"utf-8"
)
cmdgui = '"/usr/bin/setxkbmap -option;xkbcomp -w0 -I$HOME/.xkb ~/.xkb/keymap/kbd.mac.onelvl $DISPLAY"'
# subprocess.check_output('echo "1" > /sys/module/hid_apple/parameters/swap_opt_cmd', shell=True).decode('utf-8')
elif system_type == "chromebook":
subprocess.check_output("/bin/bash -c ./chromebook.sh", shell=True).decode(
"utf-8"
)
cmdgui = '"setxkbmap -option;xkbcomp -w0 -I$HOME/.xkb ~/.xkb/keymap/kbd.chromebook.gui $DISPLAY"'
# password = getpass("Please enter your password to complete the keyswap: ")
# proc = Popen("echo '1' | sudo tee -a /sys/module/hid_apple/parameters/swap_opt_cmd".split(), stdin=PIPE, stdout=PIPE, stderr=PIPE)
# proc.communicate(password.encode())
if swap_behavior == 1:
print("Setting up " + system_type + " keyswap as a service.")
print("You can disable and remove the service by using the following commands.")
print("systemctl --user stop keyswap")
print("systemctl --user disable keyswap")
print("rm -rf ~/.config/autostart/keyswap.sh")
print("rm -rf ~/.config/xactive.sh")
keyswapcmd = (
'/bin/bash -c "./keyswap_service.sh 1 0 '
+ system_type
+ " "
+ str(internalid).strip()
+ " "
+ str(usbid).strip()
+ " "
+ str(chromeswap)
+ '"'
)
print(keyswapcmd)
subprocess.check_output(keyswapcmd, shell=True).decode("utf-8")
else:
print(
"Setting up "
+ system_type
+ " keyswap inside your profiles ~/.Xsession file."
)
print(
"You can modify or remove the file if you want you want to remove the modification."
)
keyswapcmd = '/bin/bash -c "./keyswap_service.sh 0 ' + cmdgui + '"'
subprocess.check_output(keyswapcmd, shell=True).decode("utf-8")
print(
"Please run this command in the terminal if you are using a Windows or Macbook."
)
print("Your keymapping will not work right on Apple keyboards without it.")
print("echo '1' | sudo tee -a /sys/module/hid_apple/parameters/swap_opt_cmd")
|
https://github.com/rbreaves/kinto/issues/5
|
K!nt◎
- F!x the dɑmn kɐyb◎ɑrd. -
Press Enter to begin...
What type of system are you using?
1) Windows
2) Chromebook
3) Mac
3
Would you like to swap Command back to Super/Win and Ctrl key back to Ctrl when using terminal applications? (y/n)
Note: For a more mac like experience & less issues with terminal based interactions y is recommended.
y
Looking for keyboards...
Internal Keyboard
Name: AT Translated Set 2 keyboard
ID: 17
USB Keyboard
Name: Microsoft_Microsoft_Nano_Transceiver_1.1
ID: 14
14
14
Setting up mac keyswap as a service.
You can disable and remove the service by using the following commands.
systemctl --user stop keyswap
systemctl --user disable keyswap
rm -rf ~/.config/autostart/keyswap.sh
rm -rf ~/.config/xactive.sh
/bin/bash -c "./keyswap_service.sh 1 0 mac 17 14
14
14 0"
+ swapbehavior=1
+ noswapcmd=0
+ systemtype=mac
+ internalid=17
+ usbid=14
+ chromeswap=
+ [[ 1 == \1 ]]
++ whoami
+ swapcmd='\/bin\/bash\ \/home\/bdu\/.config\/xactive.sh\ mac\ 17\ 14\ '
+ mkdir -p /home/bdu/.config/systemd/user
+ cp ./system-config/keyswap.service /home/bdu/.config/systemd/user/keyswap.service
+ cp ./system-config/keyswap.sh /home/bdu/.config/autostart/keyswap.sh
+ cp ./system-config/xactive.sh /home/bdu/.config/xactive.sh
++ whoami
+ sed -i 's/{username}/bdu/g' /home/bdu/.config/systemd/user/keyswap.service
+ sed -i 's/ExecStart=/ExecStart=\/bin\/bash\ \/home\/bdu\/.config\/xactive.sh\ mac\ 17\ 14\ /g' /home/bdu/.config/systemd/user/keyswap.service
+ systemctl --user enable keyswap
+ systemctl --user start keyswap
/bin/bash: line 1: 14: command not found
/bin/bash: line 2: 14: command not found
Traceback (most recent call last):
File "./install.py", line 162, in <module>
keyboard_detect()
File "./install.py", line 82, in keyboard_detect
subprocess.check_output(keyswapcmd, shell=True).decode('utf-8')
File "/usr/lib/python3.6/subprocess.py", line 356, in check_output
**kwargs).stdout
File "/usr/lib/python3.6/subprocess.py", line 438, in run
output=stdout, stderr=stderr)
subprocess.CalledProcessError: Command '/bin/bash -c "./keyswap_service.sh 1 0 mac 17 14
14
14 0"' returned non-zero exit status 127.
|
subprocess.CalledProcessError
|
def pull(
self,
repository,
tag=None,
stream=False,
auth_config=None,
decode=False,
platform=None,
):
"""
Pulls an image. Similar to the ``docker pull`` command.
Args:
repository (str): The repository to pull
tag (str): The tag to pull
stream (bool): Stream the output as a generator. Make sure to
consume the generator, otherwise pull might get cancelled.
auth_config (dict): Override the credentials that
:py:meth:`~docker.api.daemon.DaemonApiMixin.login` has set for
this request. ``auth_config`` should contain the ``username``
and ``password`` keys to be valid.
decode (bool): Decode the JSON data from the server into dicts.
Only applies with ``stream=True``
platform (str): Platform in the format ``os[/arch[/variant]]``
Returns:
(generator or str): The output
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
Example:
>>> for line in cli.pull('busybox', stream=True, decode=True):
... print(json.dumps(line, indent=4))
{
"status": "Pulling image (latest) from busybox",
"progressDetail": {},
"id": "e72ac664f4f0"
}
{
"status": "Pulling image (latest) from busybox, endpoint: ...",
"progressDetail": {},
"id": "e72ac664f4f0"
}
"""
if not tag:
repository, tag = utils.parse_repository_tag(repository)
registry, repo_name = auth.resolve_repository_name(repository)
params = {"tag": tag, "fromImage": repository}
headers = {}
if auth_config is None:
header = auth.get_config_header(self, registry)
if header:
headers["X-Registry-Auth"] = header
else:
log.debug("Sending supplied auth config")
headers["X-Registry-Auth"] = auth.encode_header(auth_config)
if platform is not None:
if utils.version_lt(self._version, "1.32"):
raise errors.InvalidVersion(
"platform was only introduced in API version 1.32"
)
params["platform"] = platform
response = self._post(
self._url("/images/create"),
params=params,
headers=headers,
stream=stream,
timeout=None,
)
self._raise_for_status(response)
if stream:
return self._stream_helper(response, decode=decode)
return self._result(response)
|
def pull(
self,
repository,
tag=None,
stream=False,
auth_config=None,
decode=False,
platform=None,
):
"""
Pulls an image. Similar to the ``docker pull`` command.
Args:
repository (str): The repository to pull
tag (str): The tag to pull
stream (bool): Stream the output as a generator
auth_config (dict): Override the credentials that
:py:meth:`~docker.api.daemon.DaemonApiMixin.login` has set for
this request. ``auth_config`` should contain the ``username``
and ``password`` keys to be valid.
decode (bool): Decode the JSON data from the server into dicts.
Only applies with ``stream=True``
platform (str): Platform in the format ``os[/arch[/variant]]``
Returns:
(generator or str): The output
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
Example:
>>> for line in cli.pull('busybox', stream=True, decode=True):
... print(json.dumps(line, indent=4))
{
"status": "Pulling image (latest) from busybox",
"progressDetail": {},
"id": "e72ac664f4f0"
}
{
"status": "Pulling image (latest) from busybox, endpoint: ...",
"progressDetail": {},
"id": "e72ac664f4f0"
}
"""
if not tag:
repository, tag = utils.parse_repository_tag(repository)
registry, repo_name = auth.resolve_repository_name(repository)
params = {"tag": tag, "fromImage": repository}
headers = {}
if auth_config is None:
header = auth.get_config_header(self, registry)
if header:
headers["X-Registry-Auth"] = header
else:
log.debug("Sending supplied auth config")
headers["X-Registry-Auth"] = auth.encode_header(auth_config)
if platform is not None:
if utils.version_lt(self._version, "1.32"):
raise errors.InvalidVersion(
"platform was only introduced in API version 1.32"
)
params["platform"] = platform
response = self._post(
self._url("/images/create"),
params=params,
headers=headers,
stream=stream,
timeout=None,
)
self._raise_for_status(response)
if stream:
return self._stream_helper(response, decode=decode)
return self._result(response)
|
https://github.com/docker/docker-py/issues/2116
|
python
client = docker.DockerClient(base_url='unix://var/run/docker.sock')
client.login(username='XXXX', password='XXXX', registry='https://index.docker.io/v1/')
client.images.pull('docker.io/vdmtl/portail-datamigration-worker-gcs-lib:latest', stream=True)
Traceback (most recent call last):
File "/Users/XXXX/apps/open-data-etl/venv/lib/python3.6/site-packages/docker/api/client.py", line 229, in _raise_for_status
response.raise_for_status()
File "/Users/XXX/apps/open-data-etl/venv/lib/python3.6/site-packages/requests/models.py", line 937, in raise_for_status
raise HTTPError(http_error_msg, response=self)
requests.exceptions.HTTPError: 404 Client Error: Not Found for url: http+docker://localhost/v1.35/images/docker.io/vdmtl/portail-datamigration-worker-gcs-lib:latest/json
client.images.pull('docker.io/vdmtl/portail-datamigration-worker-gcs-lib:latest')
<Image: 'vdmtl/portail-datamigration-worker-gcs-lib:latest'>
|
requests.exceptions.HTTPError
|
def pull(self, repository, tag=None, **kwargs):
"""
Pull an image of the given name and return it. Similar to the
``docker pull`` command.
If no tag is specified, all tags from that repository will be
pulled.
If you want to get the raw pull output, use the
:py:meth:`~docker.api.image.ImageApiMixin.pull` method in the
low-level API.
Args:
repository (str): The repository to pull
tag (str): The tag to pull
auth_config (dict): Override the credentials that
:py:meth:`~docker.client.DockerClient.login` has set for
this request. ``auth_config`` should contain the ``username``
and ``password`` keys to be valid.
platform (str): Platform in the format ``os[/arch[/variant]]``
Returns:
(:py:class:`Image` or list): The image that has been pulled.
If no ``tag`` was specified, the method will return a list
of :py:class:`Image` objects belonging to this repository.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
Example:
>>> # Pull the image tagged `latest` in the busybox repo
>>> image = client.images.pull('busybox:latest')
>>> # Pull all tags in the busybox repo
>>> images = client.images.pull('busybox')
"""
if not tag:
repository, tag = parse_repository_tag(repository)
if "stream" in kwargs:
warnings.warn(
"`stream` is not a valid parameter for this method and will be overridden"
)
del kwargs["stream"]
pull_log = self.client.api.pull(repository, tag=tag, stream=True, **kwargs)
for _ in pull_log:
# We don't do anything with the logs, but we need
# to keep the connection alive and wait for the image
# to be pulled.
pass
if tag:
return self.get(
"{0}{2}{1}".format(
repository, tag, "@" if tag.startswith("sha256:") else ":"
)
)
return self.list(repository)
|
def pull(self, repository, tag=None, **kwargs):
"""
Pull an image of the given name and return it. Similar to the
``docker pull`` command.
If no tag is specified, all tags from that repository will be
pulled.
If you want to get the raw pull output, use the
:py:meth:`~docker.api.image.ImageApiMixin.pull` method in the
low-level API.
Args:
repository (str): The repository to pull
tag (str): The tag to pull
auth_config (dict): Override the credentials that
:py:meth:`~docker.client.DockerClient.login` has set for
this request. ``auth_config`` should contain the ``username``
and ``password`` keys to be valid.
platform (str): Platform in the format ``os[/arch[/variant]]``
Returns:
(:py:class:`Image` or list): The image that has been pulled.
If no ``tag`` was specified, the method will return a list
of :py:class:`Image` objects belonging to this repository.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
Example:
>>> # Pull the image tagged `latest` in the busybox repo
>>> image = client.images.pull('busybox:latest')
>>> # Pull all tags in the busybox repo
>>> images = client.images.pull('busybox')
"""
if not tag:
repository, tag = parse_repository_tag(repository)
self.client.api.pull(repository, tag=tag, **kwargs)
if tag:
return self.get(
"{0}{2}{1}".format(
repository, tag, "@" if tag.startswith("sha256:") else ":"
)
)
return self.list(repository)
|
https://github.com/docker/docker-py/issues/2116
|
python
client = docker.DockerClient(base_url='unix://var/run/docker.sock')
client.login(username='XXXX', password='XXXX', registry='https://index.docker.io/v1/')
client.images.pull('docker.io/vdmtl/portail-datamigration-worker-gcs-lib:latest', stream=True)
Traceback (most recent call last):
File "/Users/XXXX/apps/open-data-etl/venv/lib/python3.6/site-packages/docker/api/client.py", line 229, in _raise_for_status
response.raise_for_status()
File "/Users/XXX/apps/open-data-etl/venv/lib/python3.6/site-packages/requests/models.py", line 937, in raise_for_status
raise HTTPError(http_error_msg, response=self)
requests.exceptions.HTTPError: 404 Client Error: Not Found for url: http+docker://localhost/v1.35/images/docker.io/vdmtl/portail-datamigration-worker-gcs-lib:latest/json
client.images.pull('docker.io/vdmtl/portail-datamigration-worker-gcs-lib:latest')
<Image: 'vdmtl/portail-datamigration-worker-gcs-lib:latest'>
|
requests.exceptions.HTTPError
|
def pull(
self,
repository,
tag=None,
stream=False,
auth_config=None,
decode=False,
platform=None,
):
"""
Pulls an image. Similar to the ``docker pull`` command.
Args:
repository (str): The repository to pull
tag (str): The tag to pull
stream (bool): Stream the output as a generator. Make sure to
consume the generator, otherwise pull might get cancelled.
auth_config (dict): Override the credentials that
:py:meth:`~docker.api.daemon.DaemonApiMixin.login` has set for
this request. ``auth_config`` should contain the ``username``
and ``password`` keys to be valid.
decode (bool): Decode the JSON data from the server into dicts.
Only applies with ``stream=True``
platform (str): Platform in the format ``os[/arch[/variant]]``
Returns:
(generator or str): The output
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
Example:
>>> for line in cli.pull('busybox', stream=True):
... print(json.dumps(json.loads(line), indent=4))
{
"status": "Pulling image (latest) from busybox",
"progressDetail": {},
"id": "e72ac664f4f0"
}
{
"status": "Pulling image (latest) from busybox, endpoint: ...",
"progressDetail": {},
"id": "e72ac664f4f0"
}
"""
if not tag:
repository, tag = utils.parse_repository_tag(repository)
registry, repo_name = auth.resolve_repository_name(repository)
params = {"tag": tag, "fromImage": repository}
headers = {}
if auth_config is None:
header = auth.get_config_header(self, registry)
if header:
headers["X-Registry-Auth"] = header
else:
log.debug("Sending supplied auth config")
headers["X-Registry-Auth"] = auth.encode_header(auth_config)
if platform is not None:
if utils.version_lt(self._version, "1.32"):
raise errors.InvalidVersion(
"platform was only introduced in API version 1.32"
)
params["platform"] = platform
response = self._post(
self._url("/images/create"),
params=params,
headers=headers,
stream=stream,
timeout=None,
)
self._raise_for_status(response)
if stream:
return self._stream_helper(response, decode=decode)
return self._result(response)
|
def pull(
self,
repository,
tag=None,
stream=False,
auth_config=None,
decode=False,
platform=None,
):
"""
Pulls an image. Similar to the ``docker pull`` command.
Args:
repository (str): The repository to pull
tag (str): The tag to pull
stream (bool): Stream the output as a generator
auth_config (dict): Override the credentials that
:py:meth:`~docker.api.daemon.DaemonApiMixin.login` has set for
this request. ``auth_config`` should contain the ``username``
and ``password`` keys to be valid.
decode (bool): Decode the JSON data from the server into dicts.
Only applies with ``stream=True``
platform (str): Platform in the format ``os[/arch[/variant]]``
Returns:
(generator or str): The output
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
Example:
>>> for line in cli.pull('busybox', stream=True):
... print(json.dumps(json.loads(line), indent=4))
{
"status": "Pulling image (latest) from busybox",
"progressDetail": {},
"id": "e72ac664f4f0"
}
{
"status": "Pulling image (latest) from busybox, endpoint: ...",
"progressDetail": {},
"id": "e72ac664f4f0"
}
"""
if not tag:
repository, tag = utils.parse_repository_tag(repository)
registry, repo_name = auth.resolve_repository_name(repository)
params = {"tag": tag, "fromImage": repository}
headers = {}
if auth_config is None:
header = auth.get_config_header(self, registry)
if header:
headers["X-Registry-Auth"] = header
else:
log.debug("Sending supplied auth config")
headers["X-Registry-Auth"] = auth.encode_header(auth_config)
if platform is not None:
if utils.version_lt(self._version, "1.32"):
raise errors.InvalidVersion(
"platform was only introduced in API version 1.32"
)
params["platform"] = platform
response = self._post(
self._url("/images/create"),
params=params,
headers=headers,
stream=stream,
timeout=None,
)
self._raise_for_status(response)
if stream:
return self._stream_helper(response, decode=decode)
return self._result(response)
|
https://github.com/docker/docker-py/issues/2116
|
python
client = docker.DockerClient(base_url='unix://var/run/docker.sock')
client.login(username='XXXX', password='XXXX', registry='https://index.docker.io/v1/')
client.images.pull('docker.io/vdmtl/portail-datamigration-worker-gcs-lib:latest', stream=True)
Traceback (most recent call last):
File "/Users/XXXX/apps/open-data-etl/venv/lib/python3.6/site-packages/docker/api/client.py", line 229, in _raise_for_status
response.raise_for_status()
File "/Users/XXX/apps/open-data-etl/venv/lib/python3.6/site-packages/requests/models.py", line 937, in raise_for_status
raise HTTPError(http_error_msg, response=self)
requests.exceptions.HTTPError: 404 Client Error: Not Found for url: http+docker://localhost/v1.35/images/docker.io/vdmtl/portail-datamigration-worker-gcs-lib:latest/json
client.images.pull('docker.io/vdmtl/portail-datamigration-worker-gcs-lib:latest')
<Image: 'vdmtl/portail-datamigration-worker-gcs-lib:latest'>
|
requests.exceptions.HTTPError
|
def build(
self,
path=None,
tag=None,
quiet=False,
fileobj=None,
nocache=False,
rm=False,
timeout=None,
custom_context=False,
encoding=None,
pull=False,
forcerm=False,
dockerfile=None,
container_limits=None,
decode=False,
buildargs=None,
gzip=False,
shmsize=None,
labels=None,
cache_from=None,
target=None,
network_mode=None,
squash=None,
extra_hosts=None,
platform=None,
isolation=None,
):
"""
Similar to the ``docker build`` command. Either ``path`` or ``fileobj``
needs to be set. ``path`` can be a local path (to a directory
containing a Dockerfile) or a remote URL. ``fileobj`` must be a
readable file-like object to a Dockerfile.
If you have a tar file for the Docker build context (including a
Dockerfile) already, pass a readable file-like object to ``fileobj``
and also pass ``custom_context=True``. If the stream is compressed
also, set ``encoding`` to the correct value (e.g ``gzip``).
Example:
>>> from io import BytesIO
>>> from docker import APIClient
>>> dockerfile = '''
... # Shared Volume
... FROM busybox:buildroot-2014.02
... VOLUME /data
... CMD ["/bin/sh"]
... '''
>>> f = BytesIO(dockerfile.encode('utf-8'))
>>> cli = APIClient(base_url='tcp://127.0.0.1:2375')
>>> response = [line for line in cli.build(
... fileobj=f, rm=True, tag='yourname/volume'
... )]
>>> response
['{"stream":" ---\\u003e a9eb17255234\\n"}',
'{"stream":"Step 1 : VOLUME /data\\n"}',
'{"stream":" ---\\u003e Running in abdc1e6896c6\\n"}',
'{"stream":" ---\\u003e 713bca62012e\\n"}',
'{"stream":"Removing intermediate container abdc1e6896c6\\n"}',
'{"stream":"Step 2 : CMD [\\"/bin/sh\\"]\\n"}',
'{"stream":" ---\\u003e Running in dba30f2a1a7e\\n"}',
'{"stream":" ---\\u003e 032b8b2855fc\\n"}',
'{"stream":"Removing intermediate container dba30f2a1a7e\\n"}',
'{"stream":"Successfully built 032b8b2855fc\\n"}']
Args:
path (str): Path to the directory containing the Dockerfile
fileobj: A file object to use as the Dockerfile. (Or a file-like
object)
tag (str): A tag to add to the final image
quiet (bool): Whether to return the status
nocache (bool): Don't use the cache when set to ``True``
rm (bool): Remove intermediate containers. The ``docker build``
command now defaults to ``--rm=true``, but we have kept the old
default of `False` to preserve backward compatibility
timeout (int): HTTP timeout
custom_context (bool): Optional if using ``fileobj``
encoding (str): The encoding for a stream. Set to ``gzip`` for
compressing
pull (bool): Downloads any updates to the FROM image in Dockerfiles
forcerm (bool): Always remove intermediate containers, even after
unsuccessful builds
dockerfile (str): path within the build context to the Dockerfile
buildargs (dict): A dictionary of build arguments
container_limits (dict): A dictionary of limits applied to each
container created by the build process. Valid keys:
- memory (int): set memory limit for build
- memswap (int): Total memory (memory + swap), -1 to disable
swap
- cpushares (int): CPU shares (relative weight)
- cpusetcpus (str): CPUs in which to allow execution, e.g.,
``"0-3"``, ``"0,1"``
decode (bool): If set to ``True``, the returned stream will be
decoded into dicts on the fly. Default ``False``
shmsize (int): Size of `/dev/shm` in bytes. The size must be
greater than 0. If omitted the system uses 64MB
labels (dict): A dictionary of labels to set on the image
cache_from (:py:class:`list`): A list of images used for build
cache resolution
target (str): Name of the build-stage to build in a multi-stage
Dockerfile
network_mode (str): networking mode for the run commands during
build
squash (bool): Squash the resulting images layers into a
single layer.
extra_hosts (dict): Extra hosts to add to /etc/hosts in building
containers, as a mapping of hostname to IP address.
platform (str): Platform in the format ``os[/arch[/variant]]``
isolation (str): Isolation technology used during build.
Default: `None`.
Returns:
A generator for the build output.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
``TypeError``
If neither ``path`` nor ``fileobj`` is specified.
"""
remote = context = None
headers = {}
container_limits = container_limits or {}
if path is None and fileobj is None:
raise TypeError("Either path or fileobj needs to be provided.")
if gzip and encoding is not None:
raise errors.DockerException("Can not use custom encoding if gzip is enabled")
for key in container_limits.keys():
if key not in constants.CONTAINER_LIMITS_KEYS:
raise errors.DockerException("Invalid container_limits key {0}".format(key))
if custom_context:
if not fileobj:
raise TypeError("You must specify fileobj with custom_context")
context = fileobj
elif fileobj is not None:
context = utils.mkbuildcontext(fileobj)
elif path.startswith(("http://", "https://", "git://", "github.com/", "git@")):
remote = path
elif not os.path.isdir(path):
raise TypeError("You must specify a directory to build in path")
else:
dockerignore = os.path.join(path, ".dockerignore")
exclude = None
if os.path.exists(dockerignore):
with open(dockerignore, "r") as f:
exclude = list(
filter(
lambda x: x != "" and x[0] != "#",
[l.strip() for l in f.read().splitlines()],
)
)
dockerfile = process_dockerfile(dockerfile, path)
context = utils.tar(path, exclude=exclude, dockerfile=dockerfile, gzip=gzip)
encoding = "gzip" if gzip else encoding
u = self._url("/build")
params = {
"t": tag,
"remote": remote,
"q": quiet,
"nocache": nocache,
"rm": rm,
"forcerm": forcerm,
"pull": pull,
"dockerfile": dockerfile,
}
params.update(container_limits)
if buildargs:
params.update({"buildargs": json.dumps(buildargs)})
if shmsize:
if utils.version_gte(self._version, "1.22"):
params.update({"shmsize": shmsize})
else:
raise errors.InvalidVersion(
"shmsize was only introduced in API version 1.22"
)
if labels:
if utils.version_gte(self._version, "1.23"):
params.update({"labels": json.dumps(labels)})
else:
raise errors.InvalidVersion(
"labels was only introduced in API version 1.23"
)
if cache_from:
if utils.version_gte(self._version, "1.25"):
params.update({"cachefrom": json.dumps(cache_from)})
else:
raise errors.InvalidVersion(
"cache_from was only introduced in API version 1.25"
)
if target:
if utils.version_gte(self._version, "1.29"):
params.update({"target": target})
else:
raise errors.InvalidVersion(
"target was only introduced in API version 1.29"
)
if network_mode:
if utils.version_gte(self._version, "1.25"):
params.update({"networkmode": network_mode})
else:
raise errors.InvalidVersion(
"network_mode was only introduced in API version 1.25"
)
if squash:
if utils.version_gte(self._version, "1.25"):
params.update({"squash": squash})
else:
raise errors.InvalidVersion(
"squash was only introduced in API version 1.25"
)
if extra_hosts is not None:
if utils.version_lt(self._version, "1.27"):
raise errors.InvalidVersion(
"extra_hosts was only introduced in API version 1.27"
)
if isinstance(extra_hosts, dict):
extra_hosts = utils.format_extra_hosts(extra_hosts)
params.update({"extrahosts": extra_hosts})
if platform is not None:
if utils.version_lt(self._version, "1.32"):
raise errors.InvalidVersion(
"platform was only introduced in API version 1.32"
)
params["platform"] = platform
if isolation is not None:
if utils.version_lt(self._version, "1.24"):
raise errors.InvalidVersion(
"isolation was only introduced in API version 1.24"
)
params["isolation"] = isolation
if context is not None:
headers = {"Content-Type": "application/tar"}
if encoding:
headers["Content-Encoding"] = encoding
self._set_auth_headers(headers)
response = self._post(
u,
data=context,
params=params,
headers=headers,
stream=True,
timeout=timeout,
)
if context is not None and not custom_context:
context.close()
return self._stream_helper(response, decode=decode)
|
def build(
self,
path=None,
tag=None,
quiet=False,
fileobj=None,
nocache=False,
rm=False,
timeout=None,
custom_context=False,
encoding=None,
pull=False,
forcerm=False,
dockerfile=None,
container_limits=None,
decode=False,
buildargs=None,
gzip=False,
shmsize=None,
labels=None,
cache_from=None,
target=None,
network_mode=None,
squash=None,
extra_hosts=None,
platform=None,
isolation=None,
):
"""
Similar to the ``docker build`` command. Either ``path`` or ``fileobj``
needs to be set. ``path`` can be a local path (to a directory
containing a Dockerfile) or a remote URL. ``fileobj`` must be a
readable file-like object to a Dockerfile.
If you have a tar file for the Docker build context (including a
Dockerfile) already, pass a readable file-like object to ``fileobj``
and also pass ``custom_context=True``. If the stream is compressed
also, set ``encoding`` to the correct value (e.g ``gzip``).
Example:
>>> from io import BytesIO
>>> from docker import APIClient
>>> dockerfile = '''
... # Shared Volume
... FROM busybox:buildroot-2014.02
... VOLUME /data
... CMD ["/bin/sh"]
... '''
>>> f = BytesIO(dockerfile.encode('utf-8'))
>>> cli = APIClient(base_url='tcp://127.0.0.1:2375')
>>> response = [line for line in cli.build(
... fileobj=f, rm=True, tag='yourname/volume'
... )]
>>> response
['{"stream":" ---\\u003e a9eb17255234\\n"}',
'{"stream":"Step 1 : VOLUME /data\\n"}',
'{"stream":" ---\\u003e Running in abdc1e6896c6\\n"}',
'{"stream":" ---\\u003e 713bca62012e\\n"}',
'{"stream":"Removing intermediate container abdc1e6896c6\\n"}',
'{"stream":"Step 2 : CMD [\\"/bin/sh\\"]\\n"}',
'{"stream":" ---\\u003e Running in dba30f2a1a7e\\n"}',
'{"stream":" ---\\u003e 032b8b2855fc\\n"}',
'{"stream":"Removing intermediate container dba30f2a1a7e\\n"}',
'{"stream":"Successfully built 032b8b2855fc\\n"}']
Args:
path (str): Path to the directory containing the Dockerfile
fileobj: A file object to use as the Dockerfile. (Or a file-like
object)
tag (str): A tag to add to the final image
quiet (bool): Whether to return the status
nocache (bool): Don't use the cache when set to ``True``
rm (bool): Remove intermediate containers. The ``docker build``
command now defaults to ``--rm=true``, but we have kept the old
default of `False` to preserve backward compatibility
timeout (int): HTTP timeout
custom_context (bool): Optional if using ``fileobj``
encoding (str): The encoding for a stream. Set to ``gzip`` for
compressing
pull (bool): Downloads any updates to the FROM image in Dockerfiles
forcerm (bool): Always remove intermediate containers, even after
unsuccessful builds
dockerfile (str): path within the build context to the Dockerfile
buildargs (dict): A dictionary of build arguments
container_limits (dict): A dictionary of limits applied to each
container created by the build process. Valid keys:
- memory (int): set memory limit for build
- memswap (int): Total memory (memory + swap), -1 to disable
swap
- cpushares (int): CPU shares (relative weight)
- cpusetcpus (str): CPUs in which to allow execution, e.g.,
``"0-3"``, ``"0,1"``
decode (bool): If set to ``True``, the returned stream will be
decoded into dicts on the fly. Default ``False``
shmsize (int): Size of `/dev/shm` in bytes. The size must be
greater than 0. If omitted the system uses 64MB
labels (dict): A dictionary of labels to set on the image
cache_from (:py:class:`list`): A list of images used for build
cache resolution
target (str): Name of the build-stage to build in a multi-stage
Dockerfile
network_mode (str): networking mode for the run commands during
build
squash (bool): Squash the resulting images layers into a
single layer.
extra_hosts (dict): Extra hosts to add to /etc/hosts in building
containers, as a mapping of hostname to IP address.
platform (str): Platform in the format ``os[/arch[/variant]]``
isolation (str): Isolation technology used during build.
Default: `None`.
Returns:
A generator for the build output.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
``TypeError``
If neither ``path`` nor ``fileobj`` is specified.
"""
remote = context = None
headers = {}
container_limits = container_limits or {}
if path is None and fileobj is None:
raise TypeError("Either path or fileobj needs to be provided.")
if gzip and encoding is not None:
raise errors.DockerException("Can not use custom encoding if gzip is enabled")
for key in container_limits.keys():
if key not in constants.CONTAINER_LIMITS_KEYS:
raise errors.DockerException("Invalid container_limits key {0}".format(key))
if custom_context:
if not fileobj:
raise TypeError("You must specify fileobj with custom_context")
context = fileobj
elif fileobj is not None:
context = utils.mkbuildcontext(fileobj)
elif path.startswith(("http://", "https://", "git://", "github.com/", "git@")):
remote = path
elif not os.path.isdir(path):
raise TypeError("You must specify a directory to build in path")
else:
dockerignore = os.path.join(path, ".dockerignore")
exclude = None
if os.path.exists(dockerignore):
with open(dockerignore, "r") as f:
exclude = list(
filter(
lambda x: x != "" and x[0] != "#",
[l.strip() for l in f.read().splitlines()],
)
)
if dockerfile and os.path.relpath(dockerfile, path).startswith(".."):
with open(dockerfile, "r") as df:
dockerfile = (
".dockerfile.{0:x}".format(random.getrandbits(160)),
df.read(),
)
else:
dockerfile = (dockerfile, None)
context = utils.tar(path, exclude=exclude, dockerfile=dockerfile, gzip=gzip)
encoding = "gzip" if gzip else encoding
u = self._url("/build")
params = {
"t": tag,
"remote": remote,
"q": quiet,
"nocache": nocache,
"rm": rm,
"forcerm": forcerm,
"pull": pull,
"dockerfile": dockerfile,
}
params.update(container_limits)
if buildargs:
params.update({"buildargs": json.dumps(buildargs)})
if shmsize:
if utils.version_gte(self._version, "1.22"):
params.update({"shmsize": shmsize})
else:
raise errors.InvalidVersion(
"shmsize was only introduced in API version 1.22"
)
if labels:
if utils.version_gte(self._version, "1.23"):
params.update({"labels": json.dumps(labels)})
else:
raise errors.InvalidVersion(
"labels was only introduced in API version 1.23"
)
if cache_from:
if utils.version_gte(self._version, "1.25"):
params.update({"cachefrom": json.dumps(cache_from)})
else:
raise errors.InvalidVersion(
"cache_from was only introduced in API version 1.25"
)
if target:
if utils.version_gte(self._version, "1.29"):
params.update({"target": target})
else:
raise errors.InvalidVersion(
"target was only introduced in API version 1.29"
)
if network_mode:
if utils.version_gte(self._version, "1.25"):
params.update({"networkmode": network_mode})
else:
raise errors.InvalidVersion(
"network_mode was only introduced in API version 1.25"
)
if squash:
if utils.version_gte(self._version, "1.25"):
params.update({"squash": squash})
else:
raise errors.InvalidVersion(
"squash was only introduced in API version 1.25"
)
if extra_hosts is not None:
if utils.version_lt(self._version, "1.27"):
raise errors.InvalidVersion(
"extra_hosts was only introduced in API version 1.27"
)
if isinstance(extra_hosts, dict):
extra_hosts = utils.format_extra_hosts(extra_hosts)
params.update({"extrahosts": extra_hosts})
if platform is not None:
if utils.version_lt(self._version, "1.32"):
raise errors.InvalidVersion(
"platform was only introduced in API version 1.32"
)
params["platform"] = platform
if isolation is not None:
if utils.version_lt(self._version, "1.24"):
raise errors.InvalidVersion(
"isolation was only introduced in API version 1.24"
)
params["isolation"] = isolation
if context is not None:
headers = {"Content-Type": "application/tar"}
if encoding:
headers["Content-Encoding"] = encoding
self._set_auth_headers(headers)
response = self._post(
u,
data=context,
params=params,
headers=headers,
stream=True,
timeout=timeout,
)
if context is not None and not custom_context:
context.close()
return self._stream_helper(response, decode=decode)
|
https://github.com/docker/docker-py/issues/1980
|
$> docker-compose build
Building testservice
Traceback (most recent call last):
File "/usr/bin/docker-compose", line 11, in <module>
load_entry_point('docker-compose==1.20.1', 'console_scripts', 'docker-compose')()
File "/usr/lib/python3.6/site-packages/compose/cli/main.py", line 71, in main
command()
File "/usr/lib/python3.6/site-packages/compose/cli/main.py", line 127, in perform_command
handler(command, command_options)
File "/usr/lib/python3.6/site-packages/compose/cli/main.py", line 280, in build
build_args=build_args)
File "/usr/lib/python3.6/site-packages/compose/project.py", line 372, in build
service.build(no_cache, pull, force_rm, memory, build_args)
File "/usr/lib/python3.6/site-packages/compose/service.py", line 998, in build
'memory': parse_bytes(memory) if memory else None
File "/usr/lib/python3.6/site-packages/docker/api/build.py", line 154, in build
with open(dockerfile, 'r') as df:
FileNotFoundError: [Errno 2] No such file or directory: 'CustomDockerFile'
|
FileNotFoundError
|
def pull(self, repository, tag=None, **kwargs):
"""
Pull an image of the given name and return it. Similar to the
``docker pull`` command.
If no tag is specified, all tags from that repository will be
pulled.
If you want to get the raw pull output, use the
:py:meth:`~docker.api.image.ImageApiMixin.pull` method in the
low-level API.
Args:
name (str): The repository to pull
tag (str): The tag to pull
auth_config (dict): Override the credentials that
:py:meth:`~docker.client.DockerClient.login` has set for
this request. ``auth_config`` should contain the ``username``
and ``password`` keys to be valid.
platform (str): Platform in the format ``os[/arch[/variant]]``
Returns:
(:py:class:`Image` or list): The image that has been pulled.
If no ``tag`` was specified, the method will return a list
of :py:class:`Image` objects belonging to this repository.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
Example:
>>> # Pull the image tagged `latest` in the busybox repo
>>> image = client.images.pull('busybox:latest')
>>> # Pull all tags in the busybox repo
>>> images = client.images.pull('busybox')
"""
if not tag:
repository, tag = parse_repository_tag(repository)
self.client.api.pull(repository, tag=tag, **kwargs)
if tag:
return self.get(
"{0}{2}{1}".format(
repository, tag, "@" if tag.startswith("sha256:") else ":"
)
)
return self.list(repository)
|
def pull(self, repository, tag=None, **kwargs):
"""
Pull an image of the given name and return it. Similar to the
``docker pull`` command.
If no tag is specified, all tags from that repository will be
pulled.
If you want to get the raw pull output, use the
:py:meth:`~docker.api.image.ImageApiMixin.pull` method in the
low-level API.
Args:
name (str): The repository to pull
tag (str): The tag to pull
auth_config (dict): Override the credentials that
:py:meth:`~docker.client.DockerClient.login` has set for
this request. ``auth_config`` should contain the ``username``
and ``password`` keys to be valid.
platform (str): Platform in the format ``os[/arch[/variant]]``
Returns:
(:py:class:`Image` or list): The image that has been pulled.
If no ``tag`` was specified, the method will return a list
of :py:class:`Image` objects belonging to this repository.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
Example:
>>> # Pull the image tagged `latest` in the busybox repo
>>> image = client.images.pull('busybox:latest')
>>> # Pull all tags in the busybox repo
>>> images = client.images.pull('busybox')
"""
if not tag:
repository, tag = parse_repository_tag(repository)
self.client.api.pull(repository, tag=tag, **kwargs)
if tag:
return self.get("{0}:{1}".format(repository, tag))
return self.list(repository)
|
https://github.com/docker/docker-py/issues/1912
|
import docker
client = docker.from_env()
client.images.pull('python@sha256:7c3028aa4b9a30a34ce778b1fd4f460c9cdf174515a94641a89ef40c115b51e5')
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/docker/api/client.py", line 223, in _raise_for_status
response.raise_for_status()
File "/usr/lib/python3.6/site-packages/requests/models.py", line 935, in raise_for_status
raise HTTPError(http_error_msg, response=self)
requests.exceptions.HTTPError: 400 Client Error: Bad Request for url: https://192.168.99.100:2376/v1.35/images/python:sha256:7c3028aa4b9a30a34ce778b1fd4f460c9cdf174515a94641a89ef40c115b51e5/json
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/lib/python3.6/site-packages/docker/models/images.py", line 311, in pull
return self.get('{0}:{1}'.format(repository, tag))
File "/usr/lib/python3.6/site-packages/docker/models/images.py", line 212, in get
return self.prepare_model(self.client.api.inspect_image(name))
File "/usr/lib/python3.6/site-packages/docker/utils/decorators.py", line 19, in wrapped
return f(self, resource_id, *args, **kwargs)
File "/usr/lib/python3.6/site-packages/docker/api/image.py", line 241, in inspect_image
self._get(self._url("/images/{0}/json", image)), True
File "/usr/lib/python3.6/site-packages/docker/api/client.py", line 229, in _result
self._raise_for_status(response)
File "/usr/lib/python3.6/site-packages/docker/api/client.py", line 225, in _raise_for_status
raise create_api_error_from_http_exception(e)
File "/usr/lib/python3.6/site-packages/docker/errors.py", line 31, in create_api_error_from_http_exception
raise cls(e, response=response, explanation=explanation)
docker.errors.APIError: 400 Client Error: Bad Request ("no such image: python:sha256:7c3028aa4b9a30a34ce778b1fd4f460c9cdf174515a94641a89ef40c115b51e5: invalid reference format")
|
requests.exceptions.HTTPError
|
def create_archive(root, files=None, fileobj=None, gzip=False):
if not fileobj:
fileobj = tempfile.NamedTemporaryFile()
t = tarfile.open(mode="w:gz" if gzip else "w", fileobj=fileobj)
if files is None:
files = build_file_list(root)
for path in files:
full_path = os.path.join(root, path)
i = t.gettarinfo(full_path, arcname=path)
if i is None:
# This happens when we encounter a socket file. We can safely
# ignore it and proceed.
continue
# Workaround https://bugs.python.org/issue32713
if i.mtime < 0 or i.mtime > 8**11 - 1:
i.mtime = int(i.mtime)
if constants.IS_WINDOWS_PLATFORM:
# Windows doesn't keep track of the execute bit, so we make files
# and directories executable by default.
i.mode = i.mode & 0o755 | 0o111
if i.isfile():
try:
with open(full_path, "rb") as f:
t.addfile(i, f)
except IOError:
raise IOError("Can not read file in context: {}".format(full_path))
else:
# Directories, FIFOs, symlinks... don't need to be read.
t.addfile(i, None)
t.close()
fileobj.seek(0)
return fileobj
|
def create_archive(root, files=None, fileobj=None, gzip=False):
if not fileobj:
fileobj = tempfile.NamedTemporaryFile()
t = tarfile.open(mode="w:gz" if gzip else "w", fileobj=fileobj)
if files is None:
files = build_file_list(root)
for path in files:
full_path = os.path.join(root, path)
if os.lstat(full_path).st_mode & os.R_OK == 0:
raise IOError("Can not access file in context: {}".format(full_path))
i = t.gettarinfo(full_path, arcname=path)
if i is None:
# This happens when we encounter a socket file. We can safely
# ignore it and proceed.
continue
# Workaround https://bugs.python.org/issue32713
if i.mtime < 0 or i.mtime > 8**11 - 1:
i.mtime = int(i.mtime)
if constants.IS_WINDOWS_PLATFORM:
# Windows doesn't keep track of the execute bit, so we make files
# and directories executable by default.
i.mode = i.mode & 0o755 | 0o111
if i.isfile():
try:
with open(full_path, "rb") as f:
t.addfile(i, f)
except IOError:
t.addfile(i, None)
else:
# Directories, FIFOs, symlinks... don't need to be read.
t.addfile(i, None)
t.close()
fileobj.seek(0)
return fileobj
|
https://github.com/docker/docker-py/issues/1899
|
tim@mbp ~/C/d/docker_archive python3 test_dockerpy_tar.py
Traceback (most recent call last):
File "test_dockerpy_tar.py", line 20, in <module>
CLIENT.build(path=str(CWD), dockerfile=str(DOCKER_FILE))
File "/Users/tim/Code/dockerpy_bug/dockerpy_env/lib/python3.6/site-packages/docker/api/build.py", line 149, in build
path, exclude=exclude, dockerfile=dockerfile, gzip=gzip
File "/Users/tim/Code/dockerpy_bug/dockerpy_env/lib/python3.6/site-packages/docker/utils/build.py", line 14, in tar
root=root, fileobj=fileobj, gzip=gzip
File "/Users/tim/Code/dockerpy_bug/dockerpy_env/lib/python3.6/site-packages/docker/utils/utils.py", line 102, in create_archive
'Can not access file in context: {}'.format(full_path)
OSError: Can not access file in context: /Users/tim/Code/dockerpy_bug/docker_archive/testfile
tim@mbp ~/C/d/docker_archive ls -l
total 8
-rw-r--r-- 1 tim staff 18 Feb 4 00:24 Dockerfile
-rw-r--r-- 1 tim staff 421 Feb 4 00:24 test_dockerpy_tar.py
-r-------- 1 tim staff 0 Feb 4 00:24 testfile
tim@mbp ~/C/d/docker_archive
|
OSError
|
def create_archive(root, files=None, fileobj=None, gzip=False):
if not fileobj:
fileobj = tempfile.NamedTemporaryFile()
t = tarfile.open(mode="w:gz" if gzip else "w", fileobj=fileobj)
if files is None:
files = build_file_list(root)
for path in files:
full_path = os.path.join(root, path)
if os.lstat(full_path).st_mode & os.R_OK == 0:
raise IOError("Can not access file in context: {}".format(full_path))
i = t.gettarinfo(full_path, arcname=path)
if i is None:
# This happens when we encounter a socket file. We can safely
# ignore it and proceed.
continue
if constants.IS_WINDOWS_PLATFORM:
# Windows doesn't keep track of the execute bit, so we make files
# and directories executable by default.
i.mode = i.mode & 0o755 | 0o111
if i.isfile():
try:
with open(full_path, "rb") as f:
t.addfile(i, f)
except IOError:
t.addfile(i, None)
else:
# Directories, FIFOs, symlinks... don't need to be read.
t.addfile(i, None)
t.close()
fileobj.seek(0)
return fileobj
|
def create_archive(root, files=None, fileobj=None, gzip=False):
if not fileobj:
fileobj = tempfile.NamedTemporaryFile()
t = tarfile.open(mode="w:gz" if gzip else "w", fileobj=fileobj)
if files is None:
files = build_file_list(root)
for path in files:
full_path = os.path.join(root, path)
if not os.access(full_path, os.R_OK):
raise IOError("Can not access file in context: {}".format(full_path))
i = t.gettarinfo(full_path, arcname=path)
if i is None:
# This happens when we encounter a socket file. We can safely
# ignore it and proceed.
continue
if constants.IS_WINDOWS_PLATFORM:
# Windows doesn't keep track of the execute bit, so we make files
# and directories executable by default.
i.mode = i.mode & 0o755 | 0o111
if i.isfile():
try:
with open(full_path, "rb") as f:
t.addfile(i, f)
except IOError:
t.addfile(i, None)
else:
# Directories, FIFOs, symlinks... don't need to be read.
t.addfile(i, None)
t.close()
fileobj.seek(0)
return fileobj
|
https://github.com/docker/docker-py/issues/1841
|
$ python
Python 2.7.6 (default, Nov 23 2017, 15:49:48)
[GCC 4.8.4] on linux2
Type "help", "copyright", "credits" or "license" for more information.
import docker
docker.__version__
'2.7.0'
docker.utils.create_archive(".", ['doesnt_exist'])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/docker/utils/utils.py", line 103, in create_archive
'Can not access file in context: {}'.format(full_path)
IOError: Can not access file in context: ./doesnt_exist
|
IOError
|
def attach(self, container, stdout=True, stderr=True, stream=False, logs=False):
"""
Attach to a container.
The ``.logs()`` function is a wrapper around this method, which you can
use instead if you want to fetch/stream container output without first
retrieving the entire backlog.
Args:
container (str): The container to attach to.
stdout (bool): Include stdout.
stderr (bool): Include stderr.
stream (bool): Return container output progressively as an iterator
of strings, rather than a single string.
logs (bool): Include the container's previous output.
Returns:
By default, the container's output as a single string.
If ``stream=True``, an iterator of output strings.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
params = {
"logs": logs and 1 or 0,
"stdout": stdout and 1 or 0,
"stderr": stderr and 1 or 0,
"stream": stream and 1 or 0,
}
headers = {"Connection": "Upgrade", "Upgrade": "tcp"}
u = self._url("/containers/{0}/attach", container)
response = self._post(u, headers=headers, params=params, stream=True)
return self._read_from_socket(response, stream, self._check_is_tty(container))
|
def attach(self, container, stdout=True, stderr=True, stream=False, logs=False):
"""
Attach to a container.
The ``.logs()`` function is a wrapper around this method, which you can
use instead if you want to fetch/stream container output without first
retrieving the entire backlog.
Args:
container (str): The container to attach to.
stdout (bool): Include stdout.
stderr (bool): Include stderr.
stream (bool): Return container output progressively as an iterator
of strings, rather than a single string.
logs (bool): Include the container's previous output.
Returns:
By default, the container's output as a single string.
If ``stream=True``, an iterator of output strings.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
params = {
"logs": logs and 1 or 0,
"stdout": stdout and 1 or 0,
"stderr": stderr and 1 or 0,
"stream": stream and 1 or 0,
}
headers = {"Connection": "Upgrade", "Upgrade": "tcp"}
u = self._url("/containers/{0}/attach", container)
response = self._post(u, headers=headers, params=params, stream=stream)
return self._read_from_socket(response, stream, self._check_is_tty(container))
|
https://github.com/docker/docker-py/issues/1717
|
Traceback (most recent call last):
File "reproducer.py", line 18, in <module>
t = Test()
File "reproducer.py", line 16, in __init__
at=self.attach(self.containerid, logs=True)
File "/home/jonny/Src/foo/foo/npmvirt/lib/python2.7/site-packages/docker/utils/decorators.py", line 19, in wrapped
return f(self, resource_id, *args, **kwargs)
File "/home/jonny/Src/foo/foo/npmvirt/lib/python2.7/site-packages/docker/api/container.py", line 56, in attach
response, stream, self._check_is_tty(container)
File "/home/jonny/Src/foo/foo/npmvirt/lib/python2.7/site-packages/docker/api/client.py", line 366, in _read_from_socket
socket = self._get_raw_response_socket(response)
File "/home/jonny/Src/foo/foo/npmvirt/lib/python2.7/site-packages/docker/api/client.py", line 278, in _get_raw_response_socket
sock = response.raw._fp.fp._sock
AttributeError: 'NoneType' object has no attribute '_sock'
|
AttributeError
|
def exec_start(self, exec_id, detach=False, tty=False, stream=False, socket=False):
"""
Start a previously set up exec instance.
Args:
exec_id (str): ID of the exec instance
detach (bool): If true, detach from the exec command.
Default: False
tty (bool): Allocate a pseudo-TTY. Default: False
stream (bool): Stream response data. Default: False
Returns:
(generator or str): If ``stream=True``, a generator yielding
response chunks. A string containing response data otherwise.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
# we want opened socket if socket == True
if isinstance(exec_id, dict):
exec_id = exec_id.get("Id")
data = {"Tty": tty, "Detach": detach}
headers = {} if detach else {"Connection": "Upgrade", "Upgrade": "tcp"}
res = self._post_json(
self._url("/exec/{0}/start", exec_id), headers=headers, data=data, stream=True
)
if detach:
return self._result(res)
if socket:
return self._get_raw_response_socket(res)
return self._read_from_socket(res, stream)
|
def exec_start(self, exec_id, detach=False, tty=False, stream=False, socket=False):
"""
Start a previously set up exec instance.
Args:
exec_id (str): ID of the exec instance
detach (bool): If true, detach from the exec command.
Default: False
tty (bool): Allocate a pseudo-TTY. Default: False
stream (bool): Stream response data. Default: False
Returns:
(generator or str): If ``stream=True``, a generator yielding
response chunks. A string containing response data otherwise.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
# we want opened socket if socket == True
if isinstance(exec_id, dict):
exec_id = exec_id.get("Id")
data = {"Tty": tty, "Detach": detach}
headers = {} if detach else {"Connection": "Upgrade", "Upgrade": "tcp"}
res = self._post_json(
self._url("/exec/{0}/start", exec_id), headers=headers, data=data, stream=True
)
if socket:
return self._get_raw_response_socket(res)
return self._read_from_socket(res, stream)
|
https://github.com/docker/docker-py/issues/1271
|
In [9]: import docker
In [10]: docker.version
Out[10]: '1.10.4'
In [11]: cli = docker.Client()
In [12]: container = cli.create_container('python:2.7.11', command='sleep 1h')
In [13]: cli.start(container['Id'])
In [14]: e = cli.exec_create(container['Id'], 'echo "123"')
In [15]: cli.exec_start(e['Id'], detach=True)
^C---------------------------------------------------------------------------
KeyboardInterrupt Traceback (most recent call last)
<ipython-input-15-d3ef5f6326d6> in <module>()
----> 1 cli.exec_start(e['Id'], detach=True)
/usr/local/pyenv/versions/2.7.12/lib/python2.7/site-packages/docker/utils/decorators.pyc in wrapper(self, *args, **kwargs)
33 )
34 )
---> 35 return f(self, *args, **kwargs)
36 return wrapper
37 return decorator
/usr/local/pyenv/versions/2.7.12/lib/python2.7/site-packages/docker/api/exec_api.pyc in exec_start(self, exec_id, detach, tty, stream, socket)
79 if socket:
80 return self._get_raw_response_socket(res)
---> 81 return self._read_from_socket(res, stream)
/usr/local/pyenv/versions/2.7.12/lib/python2.7/site-packages/docker/client.pyc in _read_from_socket(self, response, stream)
324 return frames_iter(socket)
325 else:
--> 326 return six.binary_type().join(frames_iter(socket))
327
328 def _disable_socket_timeout(self, socket):
/usr/local/pyenv/versions/2.7.12/lib/python2.7/site-packages/docker/utils/socket.pyc in frames_iter(socket)
70 Returns a generator of frames read from socket
71 """
---> 72 n = next_frame_size(socket)
73 while n > 0:
74 yield read(socket, n)
/usr/local/pyenv/versions/2.7.12/lib/python2.7/site-packages/docker/utils/socket.pyc in next_frame_size(socket)
58 """
59 try:
---> 60 data = read_exactly(socket, 8)
61 except SocketError:
62 return 0
/usr/local/pyenv/versions/2.7.12/lib/python2.7/site-packages/docker/utils/socket.pyc in read_exactly(socket, n)
43 data = six.binary_type()
44 while len(data) < n:
---> 45 next_data = read(socket, n - len(data))
46 if not next_data:
47 raise SocketError("Unexpected EOF")
/usr/local/pyenv/versions/2.7.12/lib/python2.7/site-packages/docker/utils/socket.pyc in read(socket, n)
25 # wait for data to become available
26 if not isinstance(socket, NpipeSocket):
---> 27 select.select([socket], [], [])
28
29 try:
KeyboardInterrupt:
|
SocketError
|
def update_headers(f):
def inner(self, *args, **kwargs):
if "HttpHeaders" in self._auth_configs:
if not kwargs.get("headers"):
kwargs["headers"] = self._auth_configs["HttpHeaders"]
else:
kwargs["headers"].update(self._auth_configs["HttpHeaders"])
return f(self, *args, **kwargs)
return inner
|
def update_headers(f):
def inner(self, *args, **kwargs):
if "HttpHeaders" in self._auth_configs:
if "headers" not in kwargs:
kwargs["headers"] = self._auth_configs["HttpHeaders"]
else:
kwargs["headers"].update(self._auth_configs["HttpHeaders"])
return f(self, *args, **kwargs)
return inner
|
https://github.com/docker/docker-py/issues/1148
|
import docker
c = docker.Client()
c.build('https://github.com/docker/compose.git')
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-d78c607c9627> in <module>()
----> 1 c.build('https://github.com/docker/compose.git')
/home/joffrey/.envs/pydocker/local/lib/python2.7/site-packages/docker/api/build.pyc in build(self, path, tag, quiet, fileobj, nocache, rm, stream, timeout, custom_context, encoding, pull, forcerm, dockerfile, container_limits, decode, buildargs, gzip)
102 headers=headers,
103 stream=stream,
--> 104 timeout=timeout,
105 )
106
/home/joffrey/.envs/pydocker/local/lib/python2.7/site-packages/docker/utils/decorators.pyc in inner(self, *args, **kwargs)
44 kwargs['headers'] = self._auth_configs['HttpHeaders']
45 else:
---> 46 kwargs['headers'].update(self._auth_configs['HttpHeaders'])
47 return f(self, *args, **kwargs)
48 return inner
AttributeError: 'NoneType' object has no attribute 'update'
|
AttributeError
|
def inner(self, *args, **kwargs):
if "HttpHeaders" in self._auth_configs:
if not kwargs.get("headers"):
kwargs["headers"] = self._auth_configs["HttpHeaders"]
else:
kwargs["headers"].update(self._auth_configs["HttpHeaders"])
return f(self, *args, **kwargs)
|
def inner(self, *args, **kwargs):
if "HttpHeaders" in self._auth_configs:
if "headers" not in kwargs:
kwargs["headers"] = self._auth_configs["HttpHeaders"]
else:
kwargs["headers"].update(self._auth_configs["HttpHeaders"])
return f(self, *args, **kwargs)
|
https://github.com/docker/docker-py/issues/1148
|
import docker
c = docker.Client()
c.build('https://github.com/docker/compose.git')
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-d78c607c9627> in <module>()
----> 1 c.build('https://github.com/docker/compose.git')
/home/joffrey/.envs/pydocker/local/lib/python2.7/site-packages/docker/api/build.pyc in build(self, path, tag, quiet, fileobj, nocache, rm, stream, timeout, custom_context, encoding, pull, forcerm, dockerfile, container_limits, decode, buildargs, gzip)
102 headers=headers,
103 stream=stream,
--> 104 timeout=timeout,
105 )
106
/home/joffrey/.envs/pydocker/local/lib/python2.7/site-packages/docker/utils/decorators.pyc in inner(self, *args, **kwargs)
44 kwargs['headers'] = self._auth_configs['HttpHeaders']
45 else:
---> 46 kwargs['headers'].update(self._auth_configs['HttpHeaders'])
47 return f(self, *args, **kwargs)
48 return inner
AttributeError: 'NoneType' object has no attribute 'update'
|
AttributeError
|
def _stream_helper(self, response):
"""Generator for data coming from a chunked-encoded HTTP response."""
if response.raw._fp.chunked:
reader = response.raw
while not reader.closed:
# this read call will block until we get a chunk
data = reader.read(1)
if not data:
break
if reader._fp.chunk_left:
data += reader.read(reader._fp.chunk_left)
yield data
else:
# Response isn't chunked, meaning we probably
# encountered an error immediately
yield self._result(response)
|
def _stream_helper(self, response):
"""Generator for data coming from a chunked-encoded HTTP response."""
reader = response.raw
assert reader._fp.chunked
while not reader.closed:
# this read call will block until we get a chunk
data = reader.read(1)
if not data:
break
if reader._fp.chunk_left:
data += reader.read(reader._fp.chunk_left)
yield data
|
https://github.com/docker/docker-py/issues/443
|
Traceback (most recent call last):
File "/Users/omrib/git/docker/main.py", line 6, in <module>
print "\n".join(client.build(fileobj=BytesIO(), tag="a/b/c"))
File "/Users/omrib/git/docker/env/lib/python2.7/site-packages/docker/client.py", line 295, in _stream_helper
assert reader._fp.chunked
AssertionError
|
AssertionError
|
def _stream_helper(self, response):
"""Generator for data coming from a chunked-encoded HTTP response."""
socket_fp = self._get_raw_response_socket(response)
socket_fp.setblocking(1)
socket = socket_fp.makefile()
while True:
# Because Docker introduced newlines at the end of chunks in v0.9,
# and only on some API endpoints, we have to cater for both cases.
size_line = socket.readline()
if size_line == "\r\n" or size_line == "\n":
size_line = socket.readline()
size = int(size_line, 16)
if size <= 0:
break
data = socket.readline()
if not data:
break
yield data
|
def _stream_helper(self, response):
"""Generator for data coming from a chunked-encoded HTTP response."""
socket_fp = self._get_raw_response_socket(response)
socket_fp.setblocking(1)
socket = socket_fp.makefile()
while True:
# Because Docker introduced newlines at the end of chunks in v0.9,
# and only on some API endpoints, we have to cater for both cases.
size_line = socket.readline()
if size_line == "\r\n":
size_line = socket.readline()
size = int(size_line, 16)
if size <= 0:
break
data = socket.readline()
if not data:
break
yield data
|
https://github.com/docker/docker-py/issues/257
|
$ python build.py
{"stream":" ---\u003e b750fe79269d\n"}
Traceback (most recent call last):
File "build.py", line 8, in <module>
for line in client.build('.', tag='atag', rm=True):
File "/Users/adam/PYTHON3/lib/python3.4/site-packages/docker/client.py", line 233, in _stream_helper
size = int(size_line, 16)
ValueError: invalid literal for int() with base 16: '\n'```
|
ValueError
|
def _stream_helper(self, response):
"""Generator for data coming from a chunked-encoded HTTP response."""
for line in response.iter_lines(chunk_size=32):
yield line
|
def _stream_helper(self, response):
"""Generator for data coming from a chunked-encoded HTTP response."""
socket_fp = self._get_raw_response_socket(response)
socket_fp.setblocking(1)
socket = socket_fp.makefile()
while True:
size = int(socket.readline(), 16)
if size <= 0:
break
data = socket.readline()
if not data:
break
yield data
|
https://github.com/docker/docker-py/issues/176
|
ubuntu@ip-10-77-1-34:/tmp/foo$ python test.py
{"stream":" ---\u003e Using cache\n"}
Traceback (most recent call last):
File "test.py", line 5, in <module>
for line in gen:
File "/usr/local/lib/python2.7/dist-packages/docker/client.py", line 239, in _stream_helper
size = int(socket.readline(), 16)
ValueError: invalid literal for int() with base 16: ''
|
ValueError
|
def build(
self,
path=None,
tag=None,
quiet=False,
fileobj=None,
nocache=False,
rm=False,
stream=False,
timeout=None,
):
remote = context = headers = None
if path is None and fileobj is None:
raise Exception("Either path or fileobj needs to be provided.")
if fileobj is not None:
context = utils.mkbuildcontext(fileobj)
elif path.startswith(("http://", "https://", "git://", "github.com/")):
remote = path
else:
context = utils.tar(path)
if utils.compare_version("1.8", self._version) >= 0:
stream = True
u = self._url("/build")
params = {"t": tag, "remote": remote, "q": quiet, "nocache": nocache, "rm": rm}
if context is not None:
headers = {"Content-Type": "application/tar"}
response = self._post(
u,
data=context,
params=params,
headers=headers,
stream=stream,
timeout=timeout,
)
if context is not None:
context.close()
if stream:
return self._stream_helper(response)
else:
output = self._result(response)
srch = r"Successfully built ([0-9a-f]+)"
match = re.search(srch, output)
if not match:
return None, output
return match.group(1), output
|
def build(
self,
path=None,
tag=None,
quiet=False,
fileobj=None,
nocache=False,
rm=False,
stream=False,
timeout=None,
):
remote = context = headers = None
if path is None and fileobj is None:
raise Exception("Either path or fileobj needs to be provided.")
if fileobj is not None:
context = utils.mkbuildcontext(fileobj)
elif path.startswith(("http://", "https://", "git://", "github.com/")):
remote = path
else:
context = utils.tar(path)
u = self._url("/build")
params = {"t": tag, "remote": remote, "q": quiet, "nocache": nocache, "rm": rm}
if context is not None:
headers = {"Content-Type": "application/tar"}
response = self._post(
u,
data=context,
params=params,
headers=headers,
stream=stream,
timeout=timeout,
)
if context is not None:
context.close()
if stream or utils.compare_version("1.8", self._version) >= 0:
return self._stream_helper(response)
else:
output = self._result(response)
srch = r"Successfully built ([0-9a-f]+)"
match = re.search(srch, output)
if not match:
return None, output
return match.group(1), output
|
https://github.com/docker/docker-py/issues/176
|
ubuntu@ip-10-77-1-34:/tmp/foo$ python test.py
{"stream":" ---\u003e Using cache\n"}
Traceback (most recent call last):
File "test.py", line 5, in <module>
for line in gen:
File "/usr/local/lib/python2.7/dist-packages/docker/client.py", line 239, in _stream_helper
size = int(socket.readline(), 16)
ValueError: invalid literal for int() with base 16: ''
|
ValueError
|
def generate_evaluation_code(self, code):
code.mark_pos(self.pos)
self.allocate_temp_result(code)
self.function.generate_evaluation_code(code)
assert self.arg_tuple.mult_factor is None
args = self.arg_tuple.args
for arg in args:
arg.generate_evaluation_code(code)
# make sure function is in temp so that we can replace the reference below if it's a method
reuse_function_temp = self.function.is_temp
if reuse_function_temp:
function = self.function.result()
else:
function = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
self.function.make_owned_reference(code)
code.put("%s = %s; " % (function, self.function.py_result()))
self.function.generate_disposal_code(code)
self.function.free_temps(code)
self_arg = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
code.putln("%s = NULL;" % self_arg)
arg_offset_cname = code.funcstate.allocate_temp(
PyrexTypes.c_int_type, manage_ref=False
)
code.putln("%s = 0;" % arg_offset_cname)
def attribute_is_likely_method(attr):
obj = attr.obj
if obj.is_name and obj.entry.is_pyglobal:
return False # more likely to be a function
return True
if self.function.is_attribute:
likely_method = (
"likely" if attribute_is_likely_method(self.function) else "unlikely"
)
elif self.function.is_name and self.function.cf_state:
# not an attribute itself, but might have been assigned from one (e.g. bound method)
for assignment in self.function.cf_state:
value = assignment.rhs
if (
value
and value.is_attribute
and value.obj.type
and value.obj.type.is_pyobject
):
if attribute_is_likely_method(value):
likely_method = "likely"
break
else:
likely_method = "unlikely"
else:
likely_method = "unlikely"
code.putln(
"if (CYTHON_UNPACK_METHODS && %s(PyMethod_Check(%s))) {"
% (likely_method, function)
)
code.putln("%s = PyMethod_GET_SELF(%s);" % (self_arg, function))
# the following is always true in Py3 (kept only for safety),
# but is false for unbound methods in Py2
code.putln("if (likely(%s)) {" % self_arg)
code.putln("PyObject* function = PyMethod_GET_FUNCTION(%s);" % function)
code.put_incref(self_arg, py_object_type)
code.put_incref("function", py_object_type)
# free method object as early to possible to enable reuse from CPython's freelist
code.put_decref_set(function, py_object_type, "function")
code.putln("%s = 1;" % arg_offset_cname)
code.putln("}")
code.putln("}")
# actually call the function
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectFastCall", "ObjectHandling.c")
)
code.putln("{")
code.putln(
"PyObject *__pyx_callargs[%d] = {%s, %s};"
% (len(args) + 1, self_arg, ", ".join(arg.py_result() for arg in args))
)
code.putln(
"%s = __Pyx_PyObject_FastCall(%s, __pyx_callargs+1-%s, %d+%s);"
% (self.result(), function, arg_offset_cname, len(args), arg_offset_cname)
)
code.put_xdecref_clear(self_arg, py_object_type)
code.funcstate.release_temp(self_arg)
code.funcstate.release_temp(arg_offset_cname)
for arg in args:
arg.generate_disposal_code(code)
arg.free_temps(code)
code.putln(code.error_goto_if_null(self.result(), self.pos))
self.generate_gotref(code)
if reuse_function_temp:
self.function.generate_disposal_code(code)
self.function.free_temps(code)
else:
code.put_decref_clear(function, py_object_type)
code.funcstate.release_temp(function)
code.putln("}")
|
def generate_evaluation_code(self, code):
code.mark_pos(self.pos)
self.allocate_temp_result(code)
self.function.generate_evaluation_code(code)
assert self.arg_tuple.mult_factor is None
args = self.arg_tuple.args
for arg in args:
arg.generate_evaluation_code(code)
# make sure function is in temp so that we can replace the reference below if it's a method
reuse_function_temp = self.function.is_temp
if reuse_function_temp:
function = self.function.result()
else:
function = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
self.function.make_owned_reference(code)
code.put("%s = %s; " % (function, self.function.py_result()))
self.function.generate_disposal_code(code)
self.function.free_temps(code)
self_arg = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
code.putln("%s = NULL;" % self_arg)
arg_offset_cname = code.funcstate.allocate_temp(
PyrexTypes.c_int_type, manage_ref=False
)
code.putln("%s = 0;" % arg_offset_cname)
def attribute_is_likely_method(attr):
obj = attr.obj
if obj.is_name and obj.entry.is_pyglobal:
return False # more likely to be a function
return True
if self.function.is_attribute:
likely_method = (
"likely" if attribute_is_likely_method(self.function) else "unlikely"
)
elif self.function.is_name and self.function.cf_state:
# not an attribute itself, but might have been assigned from one (e.g. bound method)
for assignment in self.function.cf_state:
value = assignment.rhs
if value and value.is_attribute and value.obj.type.is_pyobject:
if attribute_is_likely_method(value):
likely_method = "likely"
break
else:
likely_method = "unlikely"
else:
likely_method = "unlikely"
code.putln(
"if (CYTHON_UNPACK_METHODS && %s(PyMethod_Check(%s))) {"
% (likely_method, function)
)
code.putln("%s = PyMethod_GET_SELF(%s);" % (self_arg, function))
# the following is always true in Py3 (kept only for safety),
# but is false for unbound methods in Py2
code.putln("if (likely(%s)) {" % self_arg)
code.putln("PyObject* function = PyMethod_GET_FUNCTION(%s);" % function)
code.put_incref(self_arg, py_object_type)
code.put_incref("function", py_object_type)
# free method object as early to possible to enable reuse from CPython's freelist
code.put_decref_set(function, py_object_type, "function")
code.putln("%s = 1;" % arg_offset_cname)
code.putln("}")
code.putln("}")
# actually call the function
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectFastCall", "ObjectHandling.c")
)
code.putln("{")
code.putln(
"PyObject *__pyx_callargs[%d] = {%s, %s};"
% (len(args) + 1, self_arg, ", ".join(arg.py_result() for arg in args))
)
code.putln(
"%s = __Pyx_PyObject_FastCall(%s, __pyx_callargs+1-%s, %d+%s);"
% (self.result(), function, arg_offset_cname, len(args), arg_offset_cname)
)
code.put_xdecref_clear(self_arg, py_object_type)
code.funcstate.release_temp(self_arg)
code.funcstate.release_temp(arg_offset_cname)
for arg in args:
arg.generate_disposal_code(code)
arg.free_temps(code)
code.putln(code.error_goto_if_null(self.result(), self.pos))
self.generate_gotref(code)
if reuse_function_temp:
self.function.generate_disposal_code(code)
self.function.free_temps(code)
else:
code.put_decref_clear(function, py_object_type)
code.funcstate.release_temp(function)
code.putln("}")
|
https://github.com/cython/cython/issues/4000
|
$ cythonize -i -3 test_cpdef_func_ptr2.pyx
Compiling /home/leofang/dev/test_cpdef_func_ptr2.pyx because it changed.
[1/1] Cythonizing /home/leofang/dev/test_cpdef_func_ptr2.pyx
Traceback (most recent call last):
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/bin/cythonize", line 11, in <module>
sys.exit(main())
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Build/Cythonize.py", line 223, in main
cython_compile(path, options)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Build/Cythonize.py", line 106, in cython_compile
**options.options)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1102, in cythonize
cythonize_one(*args)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1208, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Main.py", line 727, in compile_single
return run_pipeline(source, options, full_module_name)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Main.py", line 515, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/ModuleNode.py", line 143, in process_implementation
self.generate_c_code(env, options, result)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/ModuleNode.py", line 385, in generate_c_code
self.body.generate_function_definitions(env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 3174, in generate_function_definitions
FuncDefNode.generate_function_definitions(self, env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 1981, in generate_function_definitions
self.generate_function_body(env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 1743, in generate_function_body
self.body.generate_execution_code(code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 448, in generate_execution_code
stat.generate_execution_code(code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 5988, in generate_execution_code
value.generate_evaluation_code(code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/ExprNodes.py", line 6052, in generate_evaluation_code
if value and value.is_attribute and value.obj.type.is_pyobject:
AttributeError: 'NoneType' object has no attribute 'is_pyobject'
|
AttributeError
|
def infer_type(self, env):
# FIXME: this is way too redundant with analyse_types()
node = self.analyse_as_cimported_attribute_node(env, target=False)
if node is not None:
if node.entry.type and node.entry.type.is_cfunction:
# special-case - function converted to pointer
return PyrexTypes.CPtrType(node.entry.type)
else:
return node.entry.type
node = self.analyse_as_type_attribute(env)
if node is not None:
return node.entry.type
obj_type = self.obj.infer_type(env)
self.analyse_attribute(env, obj_type=obj_type)
if obj_type.is_builtin_type and self.type.is_cfunction:
# special case: C-API replacements for C methods of
# builtin types cannot be inferred as C functions as
# that would prevent their use as bound methods
return py_object_type
elif self.entry and self.entry.is_cmethod:
# special case: bound methods should not be inferred
# as their unbound method types
return py_object_type
return self.type
|
def infer_type(self, env):
# FIXME: this is way too redundant with analyse_types()
node = self.analyse_as_cimported_attribute_node(env, target=False)
if node is not None:
return node.entry.type
node = self.analyse_as_type_attribute(env)
if node is not None:
return node.entry.type
obj_type = self.obj.infer_type(env)
self.analyse_attribute(env, obj_type=obj_type)
if obj_type.is_builtin_type and self.type.is_cfunction:
# special case: C-API replacements for C methods of
# builtin types cannot be inferred as C functions as
# that would prevent their use as bound methods
return py_object_type
elif self.entry and self.entry.is_cmethod:
# special case: bound methods should not be inferred
# as their unbound method types
return py_object_type
return self.type
|
https://github.com/cython/cython/issues/4000
|
$ cythonize -i -3 test_cpdef_func_ptr2.pyx
Compiling /home/leofang/dev/test_cpdef_func_ptr2.pyx because it changed.
[1/1] Cythonizing /home/leofang/dev/test_cpdef_func_ptr2.pyx
Traceback (most recent call last):
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/bin/cythonize", line 11, in <module>
sys.exit(main())
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Build/Cythonize.py", line 223, in main
cython_compile(path, options)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Build/Cythonize.py", line 106, in cython_compile
**options.options)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1102, in cythonize
cythonize_one(*args)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1208, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Main.py", line 727, in compile_single
return run_pipeline(source, options, full_module_name)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Main.py", line 515, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/ModuleNode.py", line 143, in process_implementation
self.generate_c_code(env, options, result)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/ModuleNode.py", line 385, in generate_c_code
self.body.generate_function_definitions(env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 3174, in generate_function_definitions
FuncDefNode.generate_function_definitions(self, env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 1981, in generate_function_definitions
self.generate_function_body(env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 1743, in generate_function_body
self.body.generate_execution_code(code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 448, in generate_execution_code
stat.generate_execution_code(code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 5988, in generate_execution_code
value.generate_evaluation_code(code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/ExprNodes.py", line 6052, in generate_evaluation_code
if value and value.is_attribute and value.obj.type.is_pyobject:
AttributeError: 'NoneType' object has no attribute 'is_pyobject'
|
AttributeError
|
def generate_evaluation_code(self, code):
code.mark_pos(self.pos)
self.allocate_temp_result(code)
self.function.generate_evaluation_code(code)
assert self.arg_tuple.mult_factor is None
args = self.arg_tuple.args
for arg in args:
arg.generate_evaluation_code(code)
# make sure function is in temp so that we can replace the reference below if it's a method
reuse_function_temp = self.function.is_temp
if reuse_function_temp:
function = self.function.result()
else:
function = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
self.function.make_owned_reference(code)
code.put("%s = %s; " % (function, self.function.py_result()))
self.function.generate_disposal_code(code)
self.function.free_temps(code)
self_arg = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
code.putln("%s = NULL;" % self_arg)
arg_offset_cname = None
if len(args) > 1:
arg_offset_cname = code.funcstate.allocate_temp(
PyrexTypes.c_int_type, manage_ref=False
)
code.putln("%s = 0;" % arg_offset_cname)
def attribute_is_likely_method(attr):
obj = attr.obj
if obj.is_name and obj.entry.is_pyglobal:
return False # more likely to be a function
return True
if self.function.is_attribute:
likely_method = (
"likely" if attribute_is_likely_method(self.function) else "unlikely"
)
elif self.function.is_name and self.function.cf_state:
# not an attribute itself, but might have been assigned from one (e.g. bound method)
for assignment in self.function.cf_state:
value = assignment.rhs
if (
value
and value.is_attribute
and value.obj.type
and value.obj.type.is_pyobject
):
if attribute_is_likely_method(value):
likely_method = "likely"
break
else:
likely_method = "unlikely"
else:
likely_method = "unlikely"
code.putln(
"if (CYTHON_UNPACK_METHODS && %s(PyMethod_Check(%s))) {"
% (likely_method, function)
)
code.putln("%s = PyMethod_GET_SELF(%s);" % (self_arg, function))
# the following is always true in Py3 (kept only for safety),
# but is false for unbound methods in Py2
code.putln("if (likely(%s)) {" % self_arg)
code.putln("PyObject* function = PyMethod_GET_FUNCTION(%s);" % function)
code.put_incref(self_arg, py_object_type)
code.put_incref("function", py_object_type)
# free method object as early to possible to enable reuse from CPython's freelist
code.put_decref_set(function, "function")
if len(args) > 1:
code.putln("%s = 1;" % arg_offset_cname)
code.putln("}")
code.putln("}")
if not args:
# fastest special case: try to avoid tuple creation
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCallNoArg", "ObjectHandling.c")
)
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCallOneArg", "ObjectHandling.c")
)
code.putln(
"%s = (%s) ? __Pyx_PyObject_CallOneArg(%s, %s) : __Pyx_PyObject_CallNoArg(%s);"
% (self.result(), self_arg, function, self_arg, function)
)
code.put_xdecref_clear(self_arg, py_object_type)
code.funcstate.release_temp(self_arg)
code.putln(code.error_goto_if_null(self.result(), self.pos))
code.put_gotref(self.py_result())
elif len(args) == 1:
# fastest special case: try to avoid tuple creation
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCall2Args", "ObjectHandling.c")
)
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCallOneArg", "ObjectHandling.c")
)
arg = args[0]
code.putln(
"%s = (%s) ? __Pyx_PyObject_Call2Args(%s, %s, %s) : __Pyx_PyObject_CallOneArg(%s, %s);"
% (
self.result(),
self_arg,
function,
self_arg,
arg.py_result(),
function,
arg.py_result(),
)
)
code.put_xdecref_clear(self_arg, py_object_type)
code.funcstate.release_temp(self_arg)
arg.generate_disposal_code(code)
arg.free_temps(code)
code.putln(code.error_goto_if_null(self.result(), self.pos))
code.put_gotref(self.py_result())
else:
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyFunctionFastCall", "ObjectHandling.c")
)
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyCFunctionFastCall", "ObjectHandling.c")
)
for test_func, call_prefix in [
("PyFunction_Check", "Py"),
("__Pyx_PyFastCFunction_Check", "PyC"),
]:
code.putln("#if CYTHON_FAST_%sCALL" % call_prefix.upper())
code.putln("if (%s(%s)) {" % (test_func, function))
code.putln(
"PyObject *%s[%d] = {%s, %s};"
% (
Naming.quick_temp_cname,
len(args) + 1,
self_arg,
", ".join(arg.py_result() for arg in args),
)
)
code.putln(
"%s = __Pyx_%sFunction_FastCall(%s, %s+1-%s, %d+%s); %s"
% (
self.result(),
call_prefix,
function,
Naming.quick_temp_cname,
arg_offset_cname,
len(args),
arg_offset_cname,
code.error_goto_if_null(self.result(), self.pos),
)
)
code.put_xdecref_clear(self_arg, py_object_type)
code.put_gotref(self.py_result())
for arg in args:
arg.generate_disposal_code(code)
code.putln("} else")
code.putln("#endif")
code.putln("{")
args_tuple = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
code.putln(
"%s = PyTuple_New(%d+%s); %s"
% (
args_tuple,
len(args),
arg_offset_cname,
code.error_goto_if_null(args_tuple, self.pos),
)
)
code.put_gotref(args_tuple)
if len(args) > 1:
code.putln("if (%s) {" % self_arg)
code.putln(
"__Pyx_GIVEREF(%s); PyTuple_SET_ITEM(%s, 0, %s); %s = NULL;"
% (self_arg, args_tuple, self_arg, self_arg)
) # stealing owned ref in this case
code.funcstate.release_temp(self_arg)
if len(args) > 1:
code.putln("}")
for i, arg in enumerate(args):
arg.make_owned_reference(code)
code.put_giveref(arg.py_result())
code.putln(
"PyTuple_SET_ITEM(%s, %d+%s, %s);"
% (args_tuple, i, arg_offset_cname, arg.py_result())
)
if len(args) > 1:
code.funcstate.release_temp(arg_offset_cname)
for arg in args:
arg.generate_post_assignment_code(code)
arg.free_temps(code)
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCall", "ObjectHandling.c")
)
code.putln(
"%s = __Pyx_PyObject_Call(%s, %s, NULL); %s"
% (
self.result(),
function,
args_tuple,
code.error_goto_if_null(self.result(), self.pos),
)
)
code.put_gotref(self.py_result())
code.put_decref_clear(args_tuple, py_object_type)
code.funcstate.release_temp(args_tuple)
if len(args) == 1:
code.putln("}")
code.putln("}") # !CYTHON_FAST_PYCALL
if reuse_function_temp:
self.function.generate_disposal_code(code)
self.function.free_temps(code)
else:
code.put_decref_clear(function, py_object_type)
code.funcstate.release_temp(function)
|
def generate_evaluation_code(self, code):
code.mark_pos(self.pos)
self.allocate_temp_result(code)
self.function.generate_evaluation_code(code)
assert self.arg_tuple.mult_factor is None
args = self.arg_tuple.args
for arg in args:
arg.generate_evaluation_code(code)
# make sure function is in temp so that we can replace the reference below if it's a method
reuse_function_temp = self.function.is_temp
if reuse_function_temp:
function = self.function.result()
else:
function = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
self.function.make_owned_reference(code)
code.put("%s = %s; " % (function, self.function.py_result()))
self.function.generate_disposal_code(code)
self.function.free_temps(code)
self_arg = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
code.putln("%s = NULL;" % self_arg)
arg_offset_cname = None
if len(args) > 1:
arg_offset_cname = code.funcstate.allocate_temp(
PyrexTypes.c_int_type, manage_ref=False
)
code.putln("%s = 0;" % arg_offset_cname)
def attribute_is_likely_method(attr):
obj = attr.obj
if obj.is_name and obj.entry.is_pyglobal:
return False # more likely to be a function
return True
if self.function.is_attribute:
likely_method = (
"likely" if attribute_is_likely_method(self.function) else "unlikely"
)
elif self.function.is_name and self.function.cf_state:
# not an attribute itself, but might have been assigned from one (e.g. bound method)
for assignment in self.function.cf_state:
value = assignment.rhs
if value and value.is_attribute and value.obj.type.is_pyobject:
if attribute_is_likely_method(value):
likely_method = "likely"
break
else:
likely_method = "unlikely"
else:
likely_method = "unlikely"
code.putln(
"if (CYTHON_UNPACK_METHODS && %s(PyMethod_Check(%s))) {"
% (likely_method, function)
)
code.putln("%s = PyMethod_GET_SELF(%s);" % (self_arg, function))
# the following is always true in Py3 (kept only for safety),
# but is false for unbound methods in Py2
code.putln("if (likely(%s)) {" % self_arg)
code.putln("PyObject* function = PyMethod_GET_FUNCTION(%s);" % function)
code.put_incref(self_arg, py_object_type)
code.put_incref("function", py_object_type)
# free method object as early to possible to enable reuse from CPython's freelist
code.put_decref_set(function, "function")
if len(args) > 1:
code.putln("%s = 1;" % arg_offset_cname)
code.putln("}")
code.putln("}")
if not args:
# fastest special case: try to avoid tuple creation
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCallNoArg", "ObjectHandling.c")
)
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCallOneArg", "ObjectHandling.c")
)
code.putln(
"%s = (%s) ? __Pyx_PyObject_CallOneArg(%s, %s) : __Pyx_PyObject_CallNoArg(%s);"
% (self.result(), self_arg, function, self_arg, function)
)
code.put_xdecref_clear(self_arg, py_object_type)
code.funcstate.release_temp(self_arg)
code.putln(code.error_goto_if_null(self.result(), self.pos))
code.put_gotref(self.py_result())
elif len(args) == 1:
# fastest special case: try to avoid tuple creation
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCall2Args", "ObjectHandling.c")
)
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCallOneArg", "ObjectHandling.c")
)
arg = args[0]
code.putln(
"%s = (%s) ? __Pyx_PyObject_Call2Args(%s, %s, %s) : __Pyx_PyObject_CallOneArg(%s, %s);"
% (
self.result(),
self_arg,
function,
self_arg,
arg.py_result(),
function,
arg.py_result(),
)
)
code.put_xdecref_clear(self_arg, py_object_type)
code.funcstate.release_temp(self_arg)
arg.generate_disposal_code(code)
arg.free_temps(code)
code.putln(code.error_goto_if_null(self.result(), self.pos))
code.put_gotref(self.py_result())
else:
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyFunctionFastCall", "ObjectHandling.c")
)
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyCFunctionFastCall", "ObjectHandling.c")
)
for test_func, call_prefix in [
("PyFunction_Check", "Py"),
("__Pyx_PyFastCFunction_Check", "PyC"),
]:
code.putln("#if CYTHON_FAST_%sCALL" % call_prefix.upper())
code.putln("if (%s(%s)) {" % (test_func, function))
code.putln(
"PyObject *%s[%d] = {%s, %s};"
% (
Naming.quick_temp_cname,
len(args) + 1,
self_arg,
", ".join(arg.py_result() for arg in args),
)
)
code.putln(
"%s = __Pyx_%sFunction_FastCall(%s, %s+1-%s, %d+%s); %s"
% (
self.result(),
call_prefix,
function,
Naming.quick_temp_cname,
arg_offset_cname,
len(args),
arg_offset_cname,
code.error_goto_if_null(self.result(), self.pos),
)
)
code.put_xdecref_clear(self_arg, py_object_type)
code.put_gotref(self.py_result())
for arg in args:
arg.generate_disposal_code(code)
code.putln("} else")
code.putln("#endif")
code.putln("{")
args_tuple = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
code.putln(
"%s = PyTuple_New(%d+%s); %s"
% (
args_tuple,
len(args),
arg_offset_cname,
code.error_goto_if_null(args_tuple, self.pos),
)
)
code.put_gotref(args_tuple)
if len(args) > 1:
code.putln("if (%s) {" % self_arg)
code.putln(
"__Pyx_GIVEREF(%s); PyTuple_SET_ITEM(%s, 0, %s); %s = NULL;"
% (self_arg, args_tuple, self_arg, self_arg)
) # stealing owned ref in this case
code.funcstate.release_temp(self_arg)
if len(args) > 1:
code.putln("}")
for i, arg in enumerate(args):
arg.make_owned_reference(code)
code.put_giveref(arg.py_result())
code.putln(
"PyTuple_SET_ITEM(%s, %d+%s, %s);"
% (args_tuple, i, arg_offset_cname, arg.py_result())
)
if len(args) > 1:
code.funcstate.release_temp(arg_offset_cname)
for arg in args:
arg.generate_post_assignment_code(code)
arg.free_temps(code)
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCall", "ObjectHandling.c")
)
code.putln(
"%s = __Pyx_PyObject_Call(%s, %s, NULL); %s"
% (
self.result(),
function,
args_tuple,
code.error_goto_if_null(self.result(), self.pos),
)
)
code.put_gotref(self.py_result())
code.put_decref_clear(args_tuple, py_object_type)
code.funcstate.release_temp(args_tuple)
if len(args) == 1:
code.putln("}")
code.putln("}") # !CYTHON_FAST_PYCALL
if reuse_function_temp:
self.function.generate_disposal_code(code)
self.function.free_temps(code)
else:
code.put_decref_clear(function, py_object_type)
code.funcstate.release_temp(function)
|
https://github.com/cython/cython/issues/4000
|
$ cythonize -i -3 test_cpdef_func_ptr2.pyx
Compiling /home/leofang/dev/test_cpdef_func_ptr2.pyx because it changed.
[1/1] Cythonizing /home/leofang/dev/test_cpdef_func_ptr2.pyx
Traceback (most recent call last):
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/bin/cythonize", line 11, in <module>
sys.exit(main())
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Build/Cythonize.py", line 223, in main
cython_compile(path, options)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Build/Cythonize.py", line 106, in cython_compile
**options.options)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1102, in cythonize
cythonize_one(*args)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1208, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Main.py", line 727, in compile_single
return run_pipeline(source, options, full_module_name)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Main.py", line 515, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/ModuleNode.py", line 143, in process_implementation
self.generate_c_code(env, options, result)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/ModuleNode.py", line 385, in generate_c_code
self.body.generate_function_definitions(env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 3174, in generate_function_definitions
FuncDefNode.generate_function_definitions(self, env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 1981, in generate_function_definitions
self.generate_function_body(env, code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 1743, in generate_function_body
self.body.generate_execution_code(code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 448, in generate_execution_code
stat.generate_execution_code(code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/Nodes.py", line 5988, in generate_execution_code
value.generate_evaluation_code(code)
File "/home/leofang/miniconda3/envs/cupy_cuda112_dev/lib/python3.7/site-packages/Cython/Compiler/ExprNodes.py", line 6052, in generate_evaluation_code
if value and value.is_attribute and value.obj.type.is_pyobject:
AttributeError: 'NoneType' object has no attribute 'is_pyobject'
|
AttributeError
|
def get_type_information_cname(code, dtype, maxdepth=None):
"""
Output the run-time type information (__Pyx_TypeInfo) for given dtype,
and return the name of the type info struct.
Structs with two floats of the same size are encoded as complex numbers.
One can separate between complex numbers declared as struct or with native
encoding by inspecting to see if the fields field of the type is
filled in.
"""
namesuffix = mangle_dtype_name(dtype)
name = "__Pyx_TypeInfo_%s" % namesuffix
structinfo_name = "__Pyx_StructFields_%s" % namesuffix
if dtype.is_error:
return "<error>"
# It's critical that walking the type info doesn't use more stack
# depth than dtype.struct_nesting_depth() returns, so use an assertion for this
if maxdepth is None:
maxdepth = dtype.struct_nesting_depth()
if maxdepth <= 0:
assert False
if name not in code.globalstate.utility_codes:
code.globalstate.utility_codes.add(name)
typecode = code.globalstate["typeinfo"]
arraysizes = []
if dtype.is_array:
while dtype.is_array:
arraysizes.append(dtype.size)
dtype = dtype.base_type
complex_possible = dtype.is_struct_or_union and dtype.can_be_complex()
declcode = dtype.empty_declaration_code()
if dtype.is_simple_buffer_dtype():
structinfo_name = "NULL"
elif dtype.is_struct:
struct_scope = dtype.scope
if dtype.is_cv_qualified:
struct_scope = struct_scope.base_type_scope
# Must pre-call all used types in order not to recurse during utility code writing.
fields = struct_scope.var_entries
assert len(fields) > 0
types = [
get_type_information_cname(code, f.type, maxdepth - 1) for f in fields
]
typecode.putln(
"static __Pyx_StructField %s[] = {" % structinfo_name, safe=True
)
for f, typeinfo in zip(fields, types):
typecode.putln(
' {&%s, "%s", offsetof(%s, %s)},'
% (typeinfo, f.name, dtype.empty_declaration_code(), f.cname),
safe=True,
)
typecode.putln(" {NULL, NULL, 0}", safe=True)
typecode.putln("};", safe=True)
else:
assert False
rep = str(dtype)
flags = "0"
is_unsigned = "0"
if dtype is PyrexTypes.c_char_type:
is_unsigned = "IS_UNSIGNED(%s)" % declcode
typegroup = "'H'"
elif dtype.is_int:
is_unsigned = "IS_UNSIGNED(%s)" % declcode
typegroup = "%s ? 'U' : 'I'" % is_unsigned
elif complex_possible or dtype.is_complex:
typegroup = "'C'"
elif dtype.is_float:
typegroup = "'R'"
elif dtype.is_struct:
typegroup = "'S'"
if dtype.packed:
flags = "__PYX_BUF_FLAGS_PACKED_STRUCT"
elif dtype.is_pyobject:
typegroup = "'O'"
else:
assert False, dtype
typeinfo = (
"static __Pyx_TypeInfo %s = "
'{ "%s", %s, sizeof(%s), { %s }, %s, %s, %s, %s };'
)
tup = (
name,
rep,
structinfo_name,
declcode,
", ".join([str(x) for x in arraysizes]) or "0",
len(arraysizes),
typegroup,
is_unsigned,
flags,
)
typecode.putln(typeinfo % tup, safe=True)
return name
|
def get_type_information_cname(code, dtype, maxdepth=None):
"""
Output the run-time type information (__Pyx_TypeInfo) for given dtype,
and return the name of the type info struct.
Structs with two floats of the same size are encoded as complex numbers.
One can separate between complex numbers declared as struct or with native
encoding by inspecting to see if the fields field of the type is
filled in.
"""
namesuffix = mangle_dtype_name(dtype)
name = "__Pyx_TypeInfo_%s" % namesuffix
structinfo_name = "__Pyx_StructFields_%s" % namesuffix
if dtype.is_error:
return "<error>"
# It's critical that walking the type info doesn't use more stack
# depth than dtype.struct_nesting_depth() returns, so use an assertion for this
if maxdepth is None:
maxdepth = dtype.struct_nesting_depth()
if maxdepth <= 0:
assert False
if name not in code.globalstate.utility_codes:
code.globalstate.utility_codes.add(name)
typecode = code.globalstate["typeinfo"]
arraysizes = []
if dtype.is_array:
while dtype.is_array:
arraysizes.append(dtype.size)
dtype = dtype.base_type
complex_possible = dtype.is_struct_or_union and dtype.can_be_complex()
declcode = dtype.empty_declaration_code()
if dtype.is_simple_buffer_dtype():
structinfo_name = "NULL"
elif dtype.is_struct:
fields = dtype.scope.var_entries
# Must pre-call all used types in order not to recurse utility code
# writing.
assert len(fields) > 0
types = [
get_type_information_cname(code, f.type, maxdepth - 1) for f in fields
]
typecode.putln(
"static __Pyx_StructField %s[] = {" % structinfo_name, safe=True
)
for f, typeinfo in zip(fields, types):
typecode.putln(
' {&%s, "%s", offsetof(%s, %s)},'
% (typeinfo, f.name, dtype.empty_declaration_code(), f.cname),
safe=True,
)
typecode.putln(" {NULL, NULL, 0}", safe=True)
typecode.putln("};", safe=True)
else:
assert False
rep = str(dtype)
flags = "0"
is_unsigned = "0"
if dtype is PyrexTypes.c_char_type:
is_unsigned = "IS_UNSIGNED(%s)" % declcode
typegroup = "'H'"
elif dtype.is_int:
is_unsigned = "IS_UNSIGNED(%s)" % declcode
typegroup = "%s ? 'U' : 'I'" % is_unsigned
elif complex_possible or dtype.is_complex:
typegroup = "'C'"
elif dtype.is_float:
typegroup = "'R'"
elif dtype.is_struct:
typegroup = "'S'"
if dtype.packed:
flags = "__PYX_BUF_FLAGS_PACKED_STRUCT"
elif dtype.is_pyobject:
typegroup = "'O'"
else:
assert False, dtype
typeinfo = (
"static __Pyx_TypeInfo %s = "
'{ "%s", %s, sizeof(%s), { %s }, %s, %s, %s, %s };'
)
tup = (
name,
rep,
structinfo_name,
declcode,
", ".join([str(x) for x in arraysizes]) or "0",
len(arraysizes),
typegroup,
is_unsigned,
flags,
)
typecode.putln(typeinfo % tup, safe=True)
return name
|
https://github.com/cython/cython/issues/2251
|
% cythonize test.pyx
Compiling test.pyx because it changed.
[1/1] Cythonizing test.pyx
Traceback (most recent call last):
File "/home/zhanghj/.local/opt/conda/bin/cythonize", line 11, in <module>
sys.exit(main())
File "/home/zhanghj/.local/opt/conda/lib/python3.6/site-packages/Cython/Build/Cythonize.py", line 196, in main
cython_compile(path, options)
File "/home/zhanghj/.local/opt/conda/lib/python3.6/site-packages/Cython/Build/Cythonize.py", line 90, in cython_compile
**options.options)
File "/home/zhanghj/.local/opt/conda/lib/python3.6/site-packages/Cython/Build/Dependencies.py", line 1026, in cythonize
cythonize_one(*args)
File "/home/zhanghj/.local/opt/conda/lib/python3.6/site-packages/Cython/Build/Dependencies.py", line 1129, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File "/home/zhanghj/.local/opt/conda/lib/python3.6/site-packages/Cython/Compiler/Main.py", line 649, in compile_single
return run_pipeline(source, options, full_module_name)
File "/home/zhanghj/.local/opt/conda/lib/python3.6/site-packages/Cython/Compiler/Main.py", line 499, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File "/home/zhanghj/.local/opt/conda/lib/python3.6/site-packages/Cython/Compiler/Pipeline.py", line 354, in run_pipeline
data = run(phase, data)
File "/home/zhanghj/.local/opt/conda/lib/python3.6/site-packages/Cython/Compiler/Pipeline.py", line 334, in run
return phase(data)
File "/home/zhanghj/.local/opt/conda/lib/python3.6/site-packages/Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File "/home/zhanghj/.local/opt/conda/lib/python3.6/site-packages/Cython/Compiler/ModuleNode.py", line 142, in process_implementation
self.generate_c_code(env, options, result)
File "/home/zhanghj/.local/opt/conda/lib/python3.6/site-packages/Cython/Compiler/ModuleNode.py", line 397, in generate_c_code
globalstate.use_utility_code(utilcode)
File "Cython/Compiler/Code.py", line 1601, in Cython.Compiler.Code.GlobalState.use_utility_code
File "Cython/Compiler/Code.py", line 674, in Cython.Compiler.Code.LazyUtilityCode.put_code
File "/home/zhanghj/.local/opt/conda/lib/python3.6/site-packages/Cython/Compiler/PyrexTypes.py", line 888, in lazy_utility_callback
context['dtype_typeinfo'] = Buffer.get_type_information_cname(code, self.dtype)
File "/home/zhanghj/.local/opt/conda/lib/python3.6/site-packages/Cython/Compiler/Buffer.py", line 674, in get_type_information_cname
assert len(fields) > 0
AssertionError
|
AssertionError
|
def p_typecast(s):
# s.sy == "<"
pos = s.position()
s.next()
base_type = p_c_base_type(s)
is_memslice = isinstance(base_type, Nodes.MemoryViewSliceTypeNode)
is_other_unnamed_type = isinstance(
base_type,
(
Nodes.TemplatedTypeNode,
Nodes.CConstOrVolatileTypeNode,
Nodes.CTupleBaseTypeNode,
),
)
if not (is_memslice or is_other_unnamed_type) and base_type.name is None:
s.error("Unknown type")
declarator = p_c_declarator(s, empty=1)
if s.sy == "?":
s.next()
typecheck = 1
else:
typecheck = 0
s.expect(">")
operand = p_factor(s)
if is_memslice:
return ExprNodes.CythonArrayNode(pos, base_type_node=base_type, operand=operand)
return ExprNodes.TypecastNode(
pos,
base_type=base_type,
declarator=declarator,
operand=operand,
typecheck=typecheck,
)
|
def p_typecast(s):
# s.sy == "<"
pos = s.position()
s.next()
base_type = p_c_base_type(s)
is_memslice = isinstance(base_type, Nodes.MemoryViewSliceTypeNode)
is_template = isinstance(base_type, Nodes.TemplatedTypeNode)
is_const_volatile = isinstance(base_type, Nodes.CConstOrVolatileTypeNode)
if (
not is_memslice
and not is_template
and not is_const_volatile
and base_type.name is None
):
s.error("Unknown type")
declarator = p_c_declarator(s, empty=1)
if s.sy == "?":
s.next()
typecheck = 1
else:
typecheck = 0
s.expect(">")
operand = p_factor(s)
if is_memslice:
return ExprNodes.CythonArrayNode(pos, base_type_node=base_type, operand=operand)
return ExprNodes.TypecastNode(
pos,
base_type=base_type,
declarator=declarator,
operand=operand,
typecheck=typecheck,
)
|
https://github.com/cython/cython/issues/3808
|
Traceback (most recent call last):
File "/Users/jkirkham/miniconda/envs/cython/bin/cython", line 11, in <module>
sys.exit(setuptools_main())
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Main.py", line 840, in setuptools_main
return main(command_line = 1)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Main.py", line 858, in main
result = compile(sources, options)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Main.py", line 780, in compile
return compile_multiple(source, options)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Main.py", line 757, in compile_multiple
result = run_pipeline(source, options, context=context)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Main.py", line 515, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Pipeline.py", line 34, in parse
tree = context.parse(source_desc, scope, pxd = 0, full_module_name = full_module_name)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Main.py", line 369, in parse
tree = Parsing.p_module(s, pxd, full_module_name)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 3707, in p_module
body = p_statement_list(s, ctx(level=level), first_statement = 1)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 2341, in p_statement_list
stat = p_statement(s, ctx, first_statement = first_statement)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 2333, in p_statement
return p_simple_statement_list(s, ctx, first_statement=first_statement)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 2154, in p_simple_statement_list
stat = p_simple_statement(s, first_statement = first_statement)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 2148, in p_simple_statement
node = p_expression_or_assignment(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 1523, in p_expression_or_assignment
expr = p_testlist_star_expr(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 1453, in p_testlist_star_expr
expr = p_test_or_starred_expr(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 210, in p_test_or_starred_expr
return p_test(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 142, in p_test
expr = p_or_test(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 163, in p_or_test
return p_rassoc_binop_expr(s, ('or',), p_and_test)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 166, in p_rassoc_binop_expr
n1 = p_subexpr(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 179, in p_and_test
return p_rassoc_binop_expr(s, ('and',), p_not_test)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 166, in p_rassoc_binop_expr
n1 = p_subexpr(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 189, in p_not_test
return p_comparison(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 195, in p_comparison
n1 = p_starred_expr(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 219, in p_starred_expr
expr = p_bit_expr(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 261, in p_bit_expr
return p_binop_expr(s, ('|',), p_xor_expr)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 97, in p_binop_expr
n1 = p_sub_expr(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 266, in p_xor_expr
return p_binop_expr(s, ('^',), p_and_expr)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 97, in p_binop_expr
n1 = p_sub_expr(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 271, in p_and_expr
return p_binop_expr(s, ('&',), p_shift_expr)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 97, in p_binop_expr
n1 = p_sub_expr(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 276, in p_shift_expr
return p_binop_expr(s, ('<<', '>>'), p_arith_expr)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 97, in p_binop_expr
n1 = p_sub_expr(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 281, in p_arith_expr
return p_binop_expr(s, ('+', '-'), p_term)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 97, in p_binop_expr
n1 = p_sub_expr(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 286, in p_term
return p_binop_expr(s, ('*', '@', '/', '%', '//'), p_factor)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 97, in p_binop_expr
n1 = p_sub_expr(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 292, in p_factor
return _p_factor(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 308, in _p_factor
return p_typecast(s)
File "/Users/jkirkham/miniconda/envs/cython/lib/python3.8/site-packages/Cython/Compiler/Parsing.py", line 322, in p_typecast
and base_type.name is None):
AttributeError: 'CTupleBaseTypeNode' object has no attribute 'name'
|
AttributeError
|
def generate_evaluation_code(self, code):
function = self.function
if function.is_name or function.is_attribute:
code.globalstate.use_entry_utility_code(function.entry)
abs_function_cnames = ("abs", "labs", "__Pyx_abs_longlong")
is_signed_int = self.type.is_int and self.type.signed
if (
self.overflowcheck
and is_signed_int
and function.result() in abs_function_cnames
):
code.globalstate.use_utility_code(
UtilityCode.load_cached("Common", "Overflow.c")
)
code.putln(
'if (unlikely(%s == __PYX_MIN(%s))) {\
PyErr_SetString(PyExc_OverflowError,\
"Trying to take the absolute value of the most negative integer is not defined."); %s; }'
% (
self.args[0].result(),
self.args[0].type.empty_declaration_code(),
code.error_goto(self.pos),
)
)
if (
not function.type.is_pyobject
or len(self.arg_tuple.args) > 1
or (self.arg_tuple.args and self.arg_tuple.is_literal)
):
super(SimpleCallNode, self).generate_evaluation_code(code)
return
# Special case 0-args and try to avoid explicit tuple creation for Python calls with 1 arg.
arg = self.arg_tuple.args[0] if self.arg_tuple.args else None
subexprs = (self.self, self.coerced_self, function, arg)
for subexpr in subexprs:
if subexpr is not None:
subexpr.generate_evaluation_code(code)
code.mark_pos(self.pos)
assert self.is_temp
self.allocate_temp_result(code)
if arg is None:
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCallNoArg", "ObjectHandling.c")
)
code.putln(
"%s = __Pyx_PyObject_CallNoArg(%s); %s"
% (
self.result(),
function.py_result(),
code.error_goto_if_null(self.result(), self.pos),
)
)
else:
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCallOneArg", "ObjectHandling.c")
)
code.putln(
"%s = __Pyx_PyObject_CallOneArg(%s, %s); %s"
% (
self.result(),
function.py_result(),
arg.py_result(),
code.error_goto_if_null(self.result(), self.pos),
)
)
self.generate_gotref(code)
for subexpr in subexprs:
if subexpr is not None:
subexpr.generate_disposal_code(code)
subexpr.free_temps(code)
|
def generate_evaluation_code(self, code):
function = self.function
if function.is_name or function.is_attribute:
code.globalstate.use_entry_utility_code(function.entry)
if (
not function.type.is_pyobject
or len(self.arg_tuple.args) > 1
or (self.arg_tuple.args and self.arg_tuple.is_literal)
):
super(SimpleCallNode, self).generate_evaluation_code(code)
return
# Special case 0-args and try to avoid explicit tuple creation for Python calls with 1 arg.
arg = self.arg_tuple.args[0] if self.arg_tuple.args else None
subexprs = (self.self, self.coerced_self, function, arg)
for subexpr in subexprs:
if subexpr is not None:
subexpr.generate_evaluation_code(code)
code.mark_pos(self.pos)
assert self.is_temp
self.allocate_temp_result(code)
if arg is None:
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCallNoArg", "ObjectHandling.c")
)
code.putln(
"%s = __Pyx_PyObject_CallNoArg(%s); %s"
% (
self.result(),
function.py_result(),
code.error_goto_if_null(self.result(), self.pos),
)
)
else:
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCallOneArg", "ObjectHandling.c")
)
code.putln(
"%s = __Pyx_PyObject_CallOneArg(%s, %s); %s"
% (
self.result(),
function.py_result(),
arg.py_result(),
code.error_goto_if_null(self.result(), self.pos),
)
)
self.generate_gotref(code)
for subexpr in subexprs:
if subexpr is not None:
subexpr.generate_disposal_code(code)
subexpr.free_temps(code)
|
https://github.com/cython/cython/issues/1911
|
======================================================================
FAIL: int_abs (builtin_abs)
Doctest: builtin_abs.int_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.int_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in int_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.int_abs
Failed example:
int_abs(-max_int-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-2147483648
======================================================================
FAIL: long_abs (builtin_abs)
Doctest: builtin_abs.long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_abs
Failed example:
long_abs(-max_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
======================================================================
FAIL: long_long_abs (builtin_abs)
Doctest: builtin_abs.long_long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_long_abs
Failed example:
long_long_abs(-max_long_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
======================================================================
FAIL: int_abs (builtin_abs)
Doctest: builtin_abs.int_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.int_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in int_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.int_abs
Failed example:
int_abs(-max_int-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-2147483648
======================================================================
FAIL: long_abs (builtin_abs)
Doctest: builtin_abs.long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_abs
Failed example:
long_abs(-max_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
======================================================================
FAIL: long_long_abs (builtin_abs)
Doctest: builtin_abs.long_long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_long_abs
Failed example:
long_long_abs(-max_long_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
|
AssertionError
|
def generate_result_code(self, code):
func_type = self.function_type()
if func_type.is_pyobject:
arg_code = self.arg_tuple.py_result()
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCall", "ObjectHandling.c")
)
code.putln(
"%s = __Pyx_PyObject_Call(%s, %s, NULL); %s"
% (
self.result(),
self.function.py_result(),
arg_code,
code.error_goto_if_null(self.result(), self.pos),
)
)
self.generate_gotref(code)
elif func_type.is_cfunction:
if self.has_optional_args:
actual_nargs = len(self.args)
expected_nargs = len(func_type.args) - func_type.optional_arg_count
self.opt_arg_struct = code.funcstate.allocate_temp(
func_type.op_arg_struct.base_type, manage_ref=True
)
code.putln(
"%s.%s = %s;"
% (
self.opt_arg_struct,
Naming.pyrex_prefix + "n",
len(self.args) - expected_nargs,
)
)
args = list(zip(func_type.args, self.args))
for formal_arg, actual_arg in args[expected_nargs:actual_nargs]:
code.putln(
"%s.%s = %s;"
% (
self.opt_arg_struct,
func_type.opt_arg_cname(formal_arg.name),
actual_arg.result_as(formal_arg.type),
)
)
exc_checks = []
if self.type.is_pyobject and self.is_temp:
exc_checks.append("!%s" % self.result())
elif self.type.is_memoryviewslice:
assert self.is_temp
exc_checks.append(self.type.error_condition(self.result()))
elif func_type.exception_check != "+":
exc_val = func_type.exception_value
exc_check = func_type.exception_check
if exc_val is not None:
exc_checks.append(
"%s == %s"
% (self.result(), func_type.return_type.cast_code(exc_val))
)
if exc_check:
if self.nogil:
exc_checks.append("__Pyx_ErrOccurredWithGIL()")
else:
exc_checks.append("PyErr_Occurred()")
if self.is_temp or exc_checks:
rhs = self.c_call_code()
if self.result():
lhs = "%s = " % self.result()
if self.is_temp and self.type.is_pyobject:
# return_type = self.type # func_type.return_type
# print "SimpleCallNode.generate_result_code: casting", rhs, \
# "from", return_type, "to pyobject" ###
rhs = typecast(py_object_type, self.type, rhs)
else:
lhs = ""
if func_type.exception_check == "+":
translate_cpp_exception(
code,
self.pos,
"%s%s;" % (lhs, rhs),
self.result() if self.type.is_pyobject else None,
func_type.exception_value,
self.nogil,
)
else:
if exc_checks:
goto_error = code.error_goto_if(" && ".join(exc_checks), self.pos)
else:
goto_error = ""
code.putln("%s%s; %s" % (lhs, rhs, goto_error))
if self.type.is_pyobject and self.result():
self.generate_gotref(code)
if self.has_optional_args:
code.funcstate.release_temp(self.opt_arg_struct)
|
def generate_result_code(self, code):
func_type = self.function_type()
if func_type.is_pyobject:
arg_code = self.arg_tuple.py_result()
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCall", "ObjectHandling.c")
)
code.putln(
"%s = __Pyx_PyObject_Call(%s, %s, NULL); %s"
% (
self.result(),
self.function.py_result(),
arg_code,
code.error_goto_if_null(self.result(), self.pos),
)
)
self.generate_gotref(code)
elif func_type.is_cfunction:
if self.has_optional_args:
actual_nargs = len(self.args)
expected_nargs = len(func_type.args) - func_type.optional_arg_count
self.opt_arg_struct = code.funcstate.allocate_temp(
func_type.op_arg_struct.base_type, manage_ref=True
)
code.putln(
"%s.%s = %s;"
% (
self.opt_arg_struct,
Naming.pyrex_prefix + "n",
len(self.args) - expected_nargs,
)
)
args = list(zip(func_type.args, self.args))
for formal_arg, actual_arg in args[expected_nargs:actual_nargs]:
code.putln(
"%s.%s = %s;"
% (
self.opt_arg_struct,
func_type.opt_arg_cname(formal_arg.name),
actual_arg.result_as(formal_arg.type),
)
)
exc_checks = []
if self.type.is_pyobject and self.is_temp:
exc_checks.append("!%s" % self.result())
elif self.type.is_memoryviewslice:
assert self.is_temp
exc_checks.append(self.type.error_condition(self.result()))
elif func_type.exception_check != "+":
exc_val = func_type.exception_value
exc_check = func_type.exception_check
if exc_val is not None:
exc_checks.append(
"%s == %s"
% (self.result(), func_type.return_type.cast_code(exc_val))
)
if exc_check:
if self.nogil:
exc_checks.append("__Pyx_ErrOccurredWithGIL()")
else:
exc_checks.append("PyErr_Occurred()")
if self.is_temp or exc_checks:
rhs = self.c_call_code()
if self.result():
lhs = "%s = " % self.result()
if self.is_temp and self.type.is_pyobject:
# return_type = self.type # func_type.return_type
# print "SimpleCallNode.generate_result_code: casting", rhs, \
# "from", return_type, "to pyobject" ###
rhs = typecast(py_object_type, self.type, rhs)
else:
lhs = ""
if func_type.exception_check == "+":
translate_cpp_exception(
code,
self.pos,
"%s%s;" % (lhs, rhs),
self.result() if self.type.is_pyobject else None,
func_type.exception_value,
self.nogil,
)
else:
if (
self.overflowcheck
and self.type.is_int
and self.type.signed
and self.function.result() in ("abs", "labs", "__Pyx_abs_longlong")
):
goto_error = (
'if (unlikely(%s < 0)) { PyErr_SetString(PyExc_OverflowError, "value too large"); %s; }'
% (self.result(), code.error_goto(self.pos))
)
elif exc_checks:
goto_error = code.error_goto_if(" && ".join(exc_checks), self.pos)
else:
goto_error = ""
code.putln("%s%s; %s" % (lhs, rhs, goto_error))
if self.type.is_pyobject and self.result():
self.generate_gotref(code)
if self.has_optional_args:
code.funcstate.release_temp(self.opt_arg_struct)
|
https://github.com/cython/cython/issues/1911
|
======================================================================
FAIL: int_abs (builtin_abs)
Doctest: builtin_abs.int_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.int_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in int_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.int_abs
Failed example:
int_abs(-max_int-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-2147483648
======================================================================
FAIL: long_abs (builtin_abs)
Doctest: builtin_abs.long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_abs
Failed example:
long_abs(-max_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
======================================================================
FAIL: long_long_abs (builtin_abs)
Doctest: builtin_abs.long_long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_long_abs
Failed example:
long_long_abs(-max_long_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
======================================================================
FAIL: int_abs (builtin_abs)
Doctest: builtin_abs.int_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.int_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in int_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.int_abs
Failed example:
int_abs(-max_int-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-2147483648
======================================================================
FAIL: long_abs (builtin_abs)
Doctest: builtin_abs.long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_abs
Failed example:
long_abs(-max_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
======================================================================
FAIL: long_long_abs (builtin_abs)
Doctest: builtin_abs.long_long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_long_abs
Failed example:
long_long_abs(-max_long_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
|
AssertionError
|
def generate_evaluation_code(self, code):
function = self.function
if function.is_name or function.is_attribute:
code.globalstate.use_entry_utility_code(function.entry)
abs_function_cnames = ("abs", "labs", "__Pyx_abs_longlong")
is_signed_int = self.type.is_int and self.type.signed
if (
self.overflowcheck
and is_signed_int
and function.result() in abs_function_cnames
):
code.globalstate.use_utility_code(
UtilityCode.load_cached("Common", "Overflow.c")
)
code.putln(
'if (unlikely(%s == __PYX_MIN(%s))) {\
PyErr_SetString(PyExc_OverflowError,\
"Trying to take the absolute value of the most negative integer is not defined."); %s; }'
% (
self.args[0].result(),
self.args[0].type.empty_declaration_code(),
code.error_goto(self.pos),
)
)
if (
not function.type.is_pyobject
or len(self.arg_tuple.args) > 1
or (self.arg_tuple.args and self.arg_tuple.is_literal)
):
super(SimpleCallNode, self).generate_evaluation_code(code)
return
# Special case 0-args and try to avoid explicit tuple creation for Python calls with 1 arg.
arg = self.arg_tuple.args[0] if self.arg_tuple.args else None
subexprs = (self.self, self.coerced_self, function, arg)
for subexpr in subexprs:
if subexpr is not None:
subexpr.generate_evaluation_code(code)
code.mark_pos(self.pos)
assert self.is_temp
self.allocate_temp_result(code)
if arg is None:
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCallNoArg", "ObjectHandling.c")
)
code.putln(
"%s = __Pyx_PyObject_CallNoArg(%s); %s"
% (
self.result(),
function.py_result(),
code.error_goto_if_null(self.result(), self.pos),
)
)
else:
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCallOneArg", "ObjectHandling.c")
)
code.putln(
"%s = __Pyx_PyObject_CallOneArg(%s, %s); %s"
% (
self.result(),
function.py_result(),
arg.py_result(),
code.error_goto_if_null(self.result(), self.pos),
)
)
code.put_gotref(self.py_result())
for subexpr in subexprs:
if subexpr is not None:
subexpr.generate_disposal_code(code)
subexpr.free_temps(code)
|
def generate_evaluation_code(self, code):
function = self.function
if function.is_name or function.is_attribute:
code.globalstate.use_entry_utility_code(function.entry)
if (
not function.type.is_pyobject
or len(self.arg_tuple.args) > 1
or (self.arg_tuple.args and self.arg_tuple.is_literal)
):
super(SimpleCallNode, self).generate_evaluation_code(code)
return
# Special case 0-args and try to avoid explicit tuple creation for Python calls with 1 arg.
arg = self.arg_tuple.args[0] if self.arg_tuple.args else None
subexprs = (self.self, self.coerced_self, function, arg)
for subexpr in subexprs:
if subexpr is not None:
subexpr.generate_evaluation_code(code)
code.mark_pos(self.pos)
assert self.is_temp
self.allocate_temp_result(code)
if arg is None:
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCallNoArg", "ObjectHandling.c")
)
code.putln(
"%s = __Pyx_PyObject_CallNoArg(%s); %s"
% (
self.result(),
function.py_result(),
code.error_goto_if_null(self.result(), self.pos),
)
)
else:
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCallOneArg", "ObjectHandling.c")
)
code.putln(
"%s = __Pyx_PyObject_CallOneArg(%s, %s); %s"
% (
self.result(),
function.py_result(),
arg.py_result(),
code.error_goto_if_null(self.result(), self.pos),
)
)
code.put_gotref(self.py_result())
for subexpr in subexprs:
if subexpr is not None:
subexpr.generate_disposal_code(code)
subexpr.free_temps(code)
|
https://github.com/cython/cython/issues/1911
|
======================================================================
FAIL: int_abs (builtin_abs)
Doctest: builtin_abs.int_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.int_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in int_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.int_abs
Failed example:
int_abs(-max_int-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-2147483648
======================================================================
FAIL: long_abs (builtin_abs)
Doctest: builtin_abs.long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_abs
Failed example:
long_abs(-max_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
======================================================================
FAIL: long_long_abs (builtin_abs)
Doctest: builtin_abs.long_long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_long_abs
Failed example:
long_long_abs(-max_long_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
======================================================================
FAIL: int_abs (builtin_abs)
Doctest: builtin_abs.int_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.int_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in int_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.int_abs
Failed example:
int_abs(-max_int-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-2147483648
======================================================================
FAIL: long_abs (builtin_abs)
Doctest: builtin_abs.long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_abs
Failed example:
long_abs(-max_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
======================================================================
FAIL: long_long_abs (builtin_abs)
Doctest: builtin_abs.long_long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_long_abs
Failed example:
long_long_abs(-max_long_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
|
AssertionError
|
def generate_result_code(self, code):
func_type = self.function_type()
if func_type.is_pyobject:
arg_code = self.arg_tuple.py_result()
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCall", "ObjectHandling.c")
)
code.putln(
"%s = __Pyx_PyObject_Call(%s, %s, NULL); %s"
% (
self.result(),
self.function.py_result(),
arg_code,
code.error_goto_if_null(self.result(), self.pos),
)
)
code.put_gotref(self.py_result())
elif func_type.is_cfunction:
if self.has_optional_args:
actual_nargs = len(self.args)
expected_nargs = len(func_type.args) - func_type.optional_arg_count
self.opt_arg_struct = code.funcstate.allocate_temp(
func_type.op_arg_struct.base_type, manage_ref=True
)
code.putln(
"%s.%s = %s;"
% (
self.opt_arg_struct,
Naming.pyrex_prefix + "n",
len(self.args) - expected_nargs,
)
)
args = list(zip(func_type.args, self.args))
for formal_arg, actual_arg in args[expected_nargs:actual_nargs]:
code.putln(
"%s.%s = %s;"
% (
self.opt_arg_struct,
func_type.opt_arg_cname(formal_arg.name),
actual_arg.result_as(formal_arg.type),
)
)
exc_checks = []
if self.type.is_pyobject and self.is_temp:
exc_checks.append("!%s" % self.result())
elif self.type.is_memoryviewslice:
assert self.is_temp
exc_checks.append(self.type.error_condition(self.result()))
elif func_type.exception_check != "+":
exc_val = func_type.exception_value
exc_check = func_type.exception_check
if exc_val is not None:
exc_checks.append(
"%s == %s"
% (self.result(), func_type.return_type.cast_code(exc_val))
)
if exc_check:
if self.nogil:
exc_checks.append("__Pyx_ErrOccurredWithGIL()")
else:
exc_checks.append("PyErr_Occurred()")
if self.is_temp or exc_checks:
rhs = self.c_call_code()
if self.result():
lhs = "%s = " % self.result()
if self.is_temp and self.type.is_pyobject:
# return_type = self.type # func_type.return_type
# print "SimpleCallNode.generate_result_code: casting", rhs, \
# "from", return_type, "to pyobject" ###
rhs = typecast(py_object_type, self.type, rhs)
else:
lhs = ""
if func_type.exception_check == "+":
translate_cpp_exception(
code,
self.pos,
"%s%s;" % (lhs, rhs),
self.result() if self.type.is_pyobject else None,
func_type.exception_value,
self.nogil,
)
else:
if exc_checks:
goto_error = code.error_goto_if(" && ".join(exc_checks), self.pos)
else:
goto_error = ""
code.putln("%s%s; %s" % (lhs, rhs, goto_error))
if self.type.is_pyobject and self.result():
code.put_gotref(self.py_result())
if self.has_optional_args:
code.funcstate.release_temp(self.opt_arg_struct)
|
def generate_result_code(self, code):
func_type = self.function_type()
if func_type.is_pyobject:
arg_code = self.arg_tuple.py_result()
code.globalstate.use_utility_code(
UtilityCode.load_cached("PyObjectCall", "ObjectHandling.c")
)
code.putln(
"%s = __Pyx_PyObject_Call(%s, %s, NULL); %s"
% (
self.result(),
self.function.py_result(),
arg_code,
code.error_goto_if_null(self.result(), self.pos),
)
)
code.put_gotref(self.py_result())
elif func_type.is_cfunction:
if self.has_optional_args:
actual_nargs = len(self.args)
expected_nargs = len(func_type.args) - func_type.optional_arg_count
self.opt_arg_struct = code.funcstate.allocate_temp(
func_type.op_arg_struct.base_type, manage_ref=True
)
code.putln(
"%s.%s = %s;"
% (
self.opt_arg_struct,
Naming.pyrex_prefix + "n",
len(self.args) - expected_nargs,
)
)
args = list(zip(func_type.args, self.args))
for formal_arg, actual_arg in args[expected_nargs:actual_nargs]:
code.putln(
"%s.%s = %s;"
% (
self.opt_arg_struct,
func_type.opt_arg_cname(formal_arg.name),
actual_arg.result_as(formal_arg.type),
)
)
exc_checks = []
if self.type.is_pyobject and self.is_temp:
exc_checks.append("!%s" % self.result())
elif self.type.is_memoryviewslice:
assert self.is_temp
exc_checks.append(self.type.error_condition(self.result()))
elif func_type.exception_check != "+":
exc_val = func_type.exception_value
exc_check = func_type.exception_check
if exc_val is not None:
exc_checks.append(
"%s == %s"
% (self.result(), func_type.return_type.cast_code(exc_val))
)
if exc_check:
if self.nogil:
exc_checks.append("__Pyx_ErrOccurredWithGIL()")
else:
exc_checks.append("PyErr_Occurred()")
if self.is_temp or exc_checks:
rhs = self.c_call_code()
if self.result():
lhs = "%s = " % self.result()
if self.is_temp and self.type.is_pyobject:
# return_type = self.type # func_type.return_type
# print "SimpleCallNode.generate_result_code: casting", rhs, \
# "from", return_type, "to pyobject" ###
rhs = typecast(py_object_type, self.type, rhs)
else:
lhs = ""
if func_type.exception_check == "+":
translate_cpp_exception(
code,
self.pos,
"%s%s;" % (lhs, rhs),
self.result() if self.type.is_pyobject else None,
func_type.exception_value,
self.nogil,
)
else:
if (
self.overflowcheck
and self.type.is_int
and self.type.signed
and self.function.result() in ("abs", "labs", "__Pyx_abs_longlong")
):
goto_error = (
'if (unlikely(%s < 0)) { PyErr_SetString(PyExc_OverflowError, "value too large"); %s; }'
% (self.result(), code.error_goto(self.pos))
)
elif exc_checks:
goto_error = code.error_goto_if(" && ".join(exc_checks), self.pos)
else:
goto_error = ""
code.putln("%s%s; %s" % (lhs, rhs, goto_error))
if self.type.is_pyobject and self.result():
code.put_gotref(self.py_result())
if self.has_optional_args:
code.funcstate.release_temp(self.opt_arg_struct)
|
https://github.com/cython/cython/issues/1911
|
======================================================================
FAIL: int_abs (builtin_abs)
Doctest: builtin_abs.int_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.int_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in int_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.int_abs
Failed example:
int_abs(-max_int-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-2147483648
======================================================================
FAIL: long_abs (builtin_abs)
Doctest: builtin_abs.long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_abs
Failed example:
long_abs(-max_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
======================================================================
FAIL: long_long_abs (builtin_abs)
Doctest: builtin_abs.long_long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/c/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_long_abs
Failed example:
long_long_abs(-max_long_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
======================================================================
FAIL: int_abs (builtin_abs)
Doctest: builtin_abs.int_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.int_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in int_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.int_abs
Failed example:
int_abs(-max_int-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-2147483648
======================================================================
FAIL: long_abs (builtin_abs)
Doctest: builtin_abs.long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_abs
Failed example:
long_abs(-max_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
======================================================================
FAIL: long_long_abs (builtin_abs)
Doctest: builtin_abs.long_long_abs
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/lib64/python3.6/doctest.py", line 2199, in runTest
raise self.failureException(self.format_failure(new.getvalue()))
AssertionError: Failed doctest test for builtin_abs.long_long_abs
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line unknown line number, in long_long_abs
----------------------------------------------------------------------
File "/home/abuild/rpmbuild/BUILD/Cython-0.27.1/TEST_TMP/run/cpp/builtin_abs/builtin_abs.cpython-36m-x86_64-linux-gnu.so", line ?, in builtin_abs.long_long_abs
Failed example:
long_long_abs(-max_long_long-1) #doctest: +ELLIPSIS
Expected:
Traceback (most recent call last):
...
OverflowError: ...
Got:
-9223372036854775808
|
AssertionError
|
def generate_stararg_copy_code(self, code):
if not self.star_arg:
code.globalstate.use_utility_code(
UtilityCode.load_cached("RaiseArgTupleInvalid", "FunctionArguments.c")
)
code.putln("if (unlikely(%s > 0)) {" % Naming.nargs_cname)
code.put(
"__Pyx_RaiseArgtupleInvalid(%s, 1, 0, 0, %s); return %s;"
% (self.name.as_c_string_literal(), Naming.nargs_cname, self.error_value())
)
code.putln("}")
if self.starstar_arg:
if self.star_arg or not self.starstar_arg.entry.cf_used:
kwarg_check = "unlikely(%s)" % Naming.kwds_cname
else:
kwarg_check = "%s" % Naming.kwds_cname
else:
kwarg_check = "unlikely(%s) && __Pyx_NumKwargs_%s(%s)" % (
Naming.kwds_cname,
self.signature.fastvar,
Naming.kwds_cname,
)
code.globalstate.use_utility_code(
UtilityCode.load_cached("KeywordStringCheck", "FunctionArguments.c")
)
code.putln(
"if (%s && unlikely(!__Pyx_CheckKeywordStrings(%s, %s, %d))) return %s;"
% (
kwarg_check,
Naming.kwds_cname,
self.name.as_c_string_literal(),
bool(self.starstar_arg),
self.error_value(),
)
)
if self.starstar_arg and self.starstar_arg.entry.cf_used:
code.putln("if (%s) {" % kwarg_check)
code.putln(
"%s = __Pyx_KwargsAsDict_%s(%s, %s);"
% (
self.starstar_arg.entry.cname,
self.signature.fastvar,
Naming.kwds_cname,
Naming.kwvalues_cname,
)
)
code.putln(
"if (unlikely(!%s)) return %s;"
% (self.starstar_arg.entry.cname, self.error_value())
)
code.put_gotref(self.starstar_arg.entry.cname, py_object_type)
code.putln("} else {")
allow_null = all(
ref.node.allow_null for ref in self.starstar_arg.entry.cf_references
)
if allow_null:
code.putln("%s = NULL;" % (self.starstar_arg.entry.cname,))
else:
code.putln("%s = PyDict_New();" % (self.starstar_arg.entry.cname,))
code.putln(
"if (unlikely(!%s)) return %s;"
% (self.starstar_arg.entry.cname, self.error_value())
)
code.put_var_gotref(self.starstar_arg.entry)
self.starstar_arg.entry.xdecref_cleanup = allow_null
code.putln("}")
if self.self_in_stararg and not self.target.is_staticmethod:
assert not self.signature.use_fastcall
# need to create a new tuple with 'self' inserted as first item
code.put(
"%s = PyTuple_New(%s + 1); if (unlikely(!%s)) "
% (self.star_arg.entry.cname, Naming.nargs_cname, self.star_arg.entry.cname)
)
if self.starstar_arg and self.starstar_arg.entry.cf_used:
code.putln("{")
code.put_var_xdecref_clear(self.starstar_arg.entry)
code.putln("return %s;" % self.error_value())
code.putln("}")
else:
code.putln("return %s;" % self.error_value())
code.put_var_gotref(self.star_arg.entry)
code.put_incref(Naming.self_cname, py_object_type)
code.put_giveref(Naming.self_cname, py_object_type)
code.putln(
"PyTuple_SET_ITEM(%s, 0, %s);"
% (self.star_arg.entry.cname, Naming.self_cname)
)
temp = code.funcstate.allocate_temp(
PyrexTypes.c_py_ssize_t_type, manage_ref=False
)
code.putln(
"for (%s=0; %s < %s; %s++) {" % (temp, temp, Naming.nargs_cname, temp)
)
code.putln(
"PyObject* item = PyTuple_GET_ITEM(%s, %s);" % (Naming.args_cname, temp)
)
code.put_incref("item", py_object_type)
code.put_giveref("item", py_object_type)
code.putln(
"PyTuple_SET_ITEM(%s, %s+1, item);" % (self.star_arg.entry.cname, temp)
)
code.putln("}")
code.funcstate.release_temp(temp)
self.star_arg.entry.xdecref_cleanup = 0
elif self.star_arg:
assert not self.signature.use_fastcall
code.put_incref(Naming.args_cname, py_object_type)
code.putln("%s = %s;" % (self.star_arg.entry.cname, Naming.args_cname))
self.star_arg.entry.xdecref_cleanup = 0
|
def generate_stararg_copy_code(self, code):
if not self.star_arg:
code.globalstate.use_utility_code(
UtilityCode.load_cached("RaiseArgTupleInvalid", "FunctionArguments.c")
)
code.putln("if (unlikely(%s > 0)) {" % Naming.nargs_cname)
code.put(
'__Pyx_RaiseArgtupleInvalid("%s", 1, 0, 0, %s); return %s;'
% (self.name, Naming.nargs_cname, self.error_value())
)
code.putln("}")
if self.starstar_arg:
if self.star_arg or not self.starstar_arg.entry.cf_used:
kwarg_check = "unlikely(%s)" % Naming.kwds_cname
else:
kwarg_check = "%s" % Naming.kwds_cname
else:
kwarg_check = "unlikely(%s) && __Pyx_NumKwargs_%s(%s)" % (
Naming.kwds_cname,
self.signature.fastvar,
Naming.kwds_cname,
)
code.globalstate.use_utility_code(
UtilityCode.load_cached("KeywordStringCheck", "FunctionArguments.c")
)
code.putln(
'if (%s && unlikely(!__Pyx_CheckKeywordStrings(%s, "%s", %d))) return %s;'
% (
kwarg_check,
Naming.kwds_cname,
self.name,
bool(self.starstar_arg),
self.error_value(),
)
)
if self.starstar_arg and self.starstar_arg.entry.cf_used:
code.putln("if (%s) {" % kwarg_check)
code.putln(
"%s = __Pyx_KwargsAsDict_%s(%s, %s);"
% (
self.starstar_arg.entry.cname,
self.signature.fastvar,
Naming.kwds_cname,
Naming.kwvalues_cname,
)
)
code.putln(
"if (unlikely(!%s)) return %s;"
% (self.starstar_arg.entry.cname, self.error_value())
)
code.put_gotref(self.starstar_arg.entry.cname, py_object_type)
code.putln("} else {")
allow_null = all(
ref.node.allow_null for ref in self.starstar_arg.entry.cf_references
)
if allow_null:
code.putln("%s = NULL;" % (self.starstar_arg.entry.cname,))
else:
code.putln("%s = PyDict_New();" % (self.starstar_arg.entry.cname,))
code.putln(
"if (unlikely(!%s)) return %s;"
% (self.starstar_arg.entry.cname, self.error_value())
)
code.put_var_gotref(self.starstar_arg.entry)
self.starstar_arg.entry.xdecref_cleanup = allow_null
code.putln("}")
if self.self_in_stararg and not self.target.is_staticmethod:
assert not self.signature.use_fastcall
# need to create a new tuple with 'self' inserted as first item
code.put(
"%s = PyTuple_New(%s + 1); if (unlikely(!%s)) "
% (self.star_arg.entry.cname, Naming.nargs_cname, self.star_arg.entry.cname)
)
if self.starstar_arg and self.starstar_arg.entry.cf_used:
code.putln("{")
code.put_var_xdecref_clear(self.starstar_arg.entry)
code.putln("return %s;" % self.error_value())
code.putln("}")
else:
code.putln("return %s;" % self.error_value())
code.put_var_gotref(self.star_arg.entry)
code.put_incref(Naming.self_cname, py_object_type)
code.put_giveref(Naming.self_cname, py_object_type)
code.putln(
"PyTuple_SET_ITEM(%s, 0, %s);"
% (self.star_arg.entry.cname, Naming.self_cname)
)
temp = code.funcstate.allocate_temp(
PyrexTypes.c_py_ssize_t_type, manage_ref=False
)
code.putln(
"for (%s=0; %s < %s; %s++) {" % (temp, temp, Naming.nargs_cname, temp)
)
code.putln(
"PyObject* item = PyTuple_GET_ITEM(%s, %s);" % (Naming.args_cname, temp)
)
code.put_incref("item", py_object_type)
code.put_giveref("item", py_object_type)
code.putln(
"PyTuple_SET_ITEM(%s, %s+1, item);" % (self.star_arg.entry.cname, temp)
)
code.putln("}")
code.funcstate.release_temp(temp)
self.star_arg.entry.xdecref_cleanup = 0
elif self.star_arg:
assert not self.signature.use_fastcall
code.put_incref(Naming.args_cname, py_object_type)
code.putln("%s = %s;" % (self.star_arg.entry.cname, Naming.args_cname))
self.star_arg.entry.xdecref_cleanup = 0
|
https://github.com/cython/cython/issues/3090
|
In [1]: %load_ext cython
In [2]: %%cython
...: def f(k): return k
In [3]: f(k=1)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-3-2f40ef43e783> in <module>()
----> 1 f(k=1)
TypeError: f() takes no keyword arguments
|
TypeError
|
def _build_fstring(self, pos, ustring, format_args):
# Issues formatting warnings instead of errors since we really only catch a few errors by accident.
args = iter(format_args)
substrings = []
can_be_optimised = True
for s in re.split(self._parse_string_format_regex, ustring):
if not s:
continue
if s == "%%":
substrings.append(
ExprNodes.UnicodeNode(
pos, value=EncodedString("%"), constant_result="%"
)
)
continue
if s[0] != "%":
if s[-1] == "%":
warning(pos, "Incomplete format: '...%s'" % s[-3:], level=1)
can_be_optimised = False
substrings.append(
ExprNodes.UnicodeNode(pos, value=EncodedString(s), constant_result=s)
)
continue
format_type = s[-1]
try:
arg = next(args)
except StopIteration:
warning(pos, "Too few arguments for format placeholders", level=1)
can_be_optimised = False
break
if arg.is_starred:
can_be_optimised = False
break
if format_type in "asrfdoxX":
format_spec = s[1:]
conversion_char = None
if format_type in "doxX" and "." in format_spec:
# Precision is not allowed for integers in format(), but ok in %-formatting.
can_be_optimised = False
elif format_type in "ars":
format_spec = format_spec[:-1]
conversion_char = format_type
if format_spec.startswith("0"):
format_spec = (
">" + format_spec[1:]
) # right-alignment '%05s' spells '{:>5}'
elif format_type == "d":
# '%d' formatting supports float, but '{obj:d}' does not => convert to int first.
conversion_char = "d"
if format_spec.startswith("-"):
format_spec = (
"<" + format_spec[1:]
) # left-alignment '%-5s' spells '{:<5}'
substrings.append(
ExprNodes.FormattedValueNode(
arg.pos,
value=arg,
conversion_char=conversion_char,
format_spec=ExprNodes.UnicodeNode(
pos,
value=EncodedString(format_spec),
constant_result=format_spec,
)
if format_spec
else None,
)
)
else:
# keep it simple for now ...
can_be_optimised = False
break
if not can_be_optimised:
# Print all warnings we can find before finally giving up here.
return None
try:
next(args)
except StopIteration:
pass
else:
warning(pos, "Too many arguments for format placeholders", level=1)
return None
node = ExprNodes.JoinedStrNode(pos, values=substrings)
return self.visit_JoinedStrNode(node)
|
def _build_fstring(self, pos, ustring, format_args):
# Issues formatting warnings instead of errors since we really only catch a few errors by accident.
args = iter(format_args)
substrings = []
can_be_optimised = True
for s in re.split(self._parse_string_format_regex, ustring):
if not s:
continue
if s == "%%":
substrings.append(
ExprNodes.UnicodeNode(
pos, value=EncodedString("%"), constant_result="%"
)
)
continue
if s[0] != "%":
if s[-1] == "%":
warning(pos, "Incomplete format: '...%s'" % s[-3:], level=1)
can_be_optimised = False
substrings.append(
ExprNodes.UnicodeNode(pos, value=EncodedString(s), constant_result=s)
)
continue
format_type = s[-1]
try:
arg = next(args)
except StopIteration:
warning(pos, "Too few arguments for format placeholders", level=1)
can_be_optimised = False
break
if arg.is_starred:
can_be_optimised = False
break
if format_type in "asrfdoxX":
format_spec = s[1:]
conversion_char = None
if format_type in "doxX" and "." in format_spec:
# Precision is not allowed for integers in format(), but ok in %-formatting.
can_be_optimised = False
elif format_type in "ars":
format_spec = format_spec[:-1]
conversion_char = format_type
elif format_type == "d":
# '%d' formatting supports float, but '{obj:d}' does not => convert to int first.
conversion_char = "d"
substrings.append(
ExprNodes.FormattedValueNode(
arg.pos,
value=arg,
conversion_char=conversion_char,
format_spec=ExprNodes.UnicodeNode(
pos,
value=EncodedString(format_spec),
constant_result=format_spec,
)
if format_spec
else None,
)
)
else:
# keep it simple for now ...
can_be_optimised = False
break
if not can_be_optimised:
# Print all warnings we can find before finally giving up here.
return None
try:
next(args)
except StopIteration:
pass
else:
warning(pos, "Too many arguments for format placeholders", level=1)
return None
node = ExprNodes.JoinedStrNode(pos, values=substrings)
return self.visit_JoinedStrNode(node)
|
https://github.com/cython/cython/issues/3476
|
$ cython --embed -3 foo.py && gcc foo.c -lpython3.8 -o foo
$ ./foo
str: ' '
Traceback (most recent call last):
File "foo.py", line 2, in init foo
print("str: '%08s'" % ("",))
ValueError: '=' alignment not allowed in string format specifier
|
ValueError
|
def _build_fstring(self, pos, ustring, format_args):
# Issues formatting warnings instead of errors since we really only catch a few errors by accident.
args = iter(format_args)
substrings = []
can_be_optimised = True
for s in re.split(self._parse_string_format_regex, ustring):
if not s:
continue
if s == "%%":
substrings.append(
ExprNodes.UnicodeNode(
pos, value=EncodedString("%"), constant_result="%"
)
)
continue
if s[0] != "%":
if s[-1] == "%":
warning(pos, "Incomplete format: '...%s'" % s[-3:], level=1)
can_be_optimised = False
substrings.append(
ExprNodes.UnicodeNode(pos, value=EncodedString(s), constant_result=s)
)
continue
format_type = s[-1]
try:
arg = next(args)
except StopIteration:
warning(pos, "Too few arguments for format placeholders", level=1)
can_be_optimised = False
break
if arg.is_starred:
can_be_optimised = False
break
if format_type in "asrfdoxX":
format_spec = s[1:]
conversion_char = None
if format_type in "doxX" and "." in format_spec:
# Precision is not allowed for integers in format(), but ok in %-formatting.
can_be_optimised = False
elif format_type in "ars":
format_spec = format_spec[:-1]
conversion_char = format_type
elif format_type == "d":
# '%d' formatting supports float, but '{obj:d}' does not => convert to int first.
conversion_char = "d"
substrings.append(
ExprNodes.FormattedValueNode(
arg.pos,
value=arg,
conversion_char=conversion_char,
format_spec=ExprNodes.UnicodeNode(
pos,
value=EncodedString(format_spec),
constant_result=format_spec,
)
if format_spec
else None,
)
)
else:
# keep it simple for now ...
can_be_optimised = False
break
if not can_be_optimised:
# Print all warnings we can find before finally giving up here.
return None
try:
next(args)
except StopIteration:
pass
else:
warning(pos, "Too many arguments for format placeholders", level=1)
return None
node = ExprNodes.JoinedStrNode(pos, values=substrings)
return self.visit_JoinedStrNode(node)
|
def _build_fstring(self, pos, ustring, format_args):
# Issues formatting warnings instead of errors since we really only catch a few errors by accident.
args = iter(format_args)
substrings = []
can_be_optimised = True
for s in re.split(self._parse_string_format_regex, ustring):
if not s:
continue
if s == "%%":
substrings.append(
ExprNodes.UnicodeNode(
pos, value=EncodedString("%"), constant_result="%"
)
)
continue
if s[0] != "%":
if s[-1] == "%":
warning(pos, "Incomplete format: '...%s'" % s[-3:], level=1)
can_be_optimised = False
substrings.append(
ExprNodes.UnicodeNode(pos, value=EncodedString(s), constant_result=s)
)
continue
format_type = s[-1]
try:
arg = next(args)
except StopIteration:
warning(pos, "Too few arguments for format placeholders", level=1)
can_be_optimised = False
break
if arg.is_starred:
can_be_optimised = False
break
if format_type in "asrfdoxX":
format_spec = s[1:]
if format_type in "doxX" and "." in format_spec:
# Precision is not allowed for integers in format(), but ok in %-formatting.
can_be_optimised = False
elif format_type in "ars":
format_spec = format_spec[:-1]
substrings.append(
ExprNodes.FormattedValueNode(
arg.pos,
value=arg,
conversion_char=format_type if format_type in "ars" else None,
format_spec=ExprNodes.UnicodeNode(
pos,
value=EncodedString(format_spec),
constant_result=format_spec,
)
if format_spec
else None,
)
)
else:
# keep it simple for now ...
can_be_optimised = False
break
if not can_be_optimised:
# Print all warnings we can find before finally giving up here.
return None
try:
next(args)
except StopIteration:
pass
else:
warning(pos, "Too many arguments for format placeholders", level=1)
return None
node = ExprNodes.JoinedStrNode(pos, values=substrings)
return self.visit_JoinedStrNode(node)
|
https://github.com/cython/cython/issues/3092
|
(<class 'float'>, <class 'float'>)
(50.0, 50.0)
string being formatted=50
(<class 'float'>, <class 'float'>)
(50.0, 50.0)
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "cytest/test.py", line 16, in init cytest.test
test_func_with_tuple(50.,50.)
File "cytest/test.py", line 11, in cytest.test.test_func_with_tuple
s = 'string being formatted=%d-%d' % (a,b)
ValueError: Unknown format code 'd' for object of type 'float'
|
ValueError
|
def declare_builtin(self, name, pos):
name = self.mangle_class_private_name(name)
return self.outer_scope.declare_builtin(name, pos)
|
def declare_builtin(self, name, pos):
return self.outer_scope.declare_builtin(name, pos)
|
https://github.com/cython/cython/issues/3548
|
(bleeding) ✔ ~/source/other_source/pandas [master {pandas/master}|✚ 2]
jupiter@15:19 ➤ pip install -v .
Non-user install because user site-packages disabled
Created temporary directory: /tmp/pip-ephem-wheel-cache-zcmyaxf3
Created temporary directory: /tmp/pip-req-tracker-dfm6xgbp
Initialized build tracking at /tmp/pip-req-tracker-dfm6xgbp
Created build tracker: /tmp/pip-req-tracker-dfm6xgbp
Entered build tracker: /tmp/pip-req-tracker-dfm6xgbp
Created temporary directory: /tmp/pip-install-_tjupc3v
Processing /home/tcaswell/source/other_source/pandas
Created temporary directory: /tmp/pip-req-build-eh2hlgu3
Added file:///home/tcaswell/source/other_source/pandas to build tracker '/tmp/pip-req-tracker-dfm6xgbp'
Running setup.py (path:/tmp/pip-req-build-eh2hlgu3/setup.py) egg_info for package from file:///home/tcaswell/source/other_source/pandas
Running command python setup.py egg_info
warning: pandas/_libs/groupby.pyx:1101:26: Unreachable code
Error compiling Cython file:
------------------------------------------------------------
...
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):19:37: '___pyx_int64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
cdef bint ___pyx_uint64_t_is_signed
___pyx_uint64_t_is_signed = not (<___pyx_uint64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):21:38: '___pyx_uint64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
ctypedef struct __Pyx_memviewslice:
void *memview
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
^
------------------------------------------------------------
__pyxutil:15:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
ctypedef double ___pyx_float64_t "__pyx_t_5numpy_float64_t"
ctypedef unsigned long long ___pyx_uint64_t "__pyx_t_5numpy_uint64_t"
^
------------------------------------------------------------
__pyxutil:17:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
void *memview
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
ctypedef double ___pyx_float64_t "__pyx_t_5numpy_float64_t"
^
------------------------------------------------------------
__pyxutil:16:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):19:37: '___pyx_int64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
cdef bint ___pyx_uint64_t_is_signed
___pyx_uint64_t_is_signed = not (<___pyx_uint64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):21:38: '___pyx_uint64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 794, in <module>
setup_package()
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 764, in setup_package
ext_modules=maybe_cythonize(extensions, compiler_directives=directives),
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 537, in maybe_cythonize
return cythonize(extensions, *args, **kwargs)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/Cython/Build/Dependencies.py", line 1105, in cythonize
cythonize_one(*args)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/Cython/Build/Dependencies.py", line 1263, in cythonize_one
raise CompileError(None, pyx_file)
Cython.Compiler.Errors.CompileError: pandas/_libs/interval.pyx
Compiling pandas/_libs/algos.pyx because it changed.
Compiling pandas/_libs/groupby.pyx because it changed.
Compiling pandas/_libs/hashing.pyx because it changed.
Compiling pandas/_libs/hashtable.pyx because it changed.
Compiling pandas/_libs/index.pyx because it changed.
Compiling pandas/_libs/indexing.pyx because it changed.
Compiling pandas/_libs/internals.pyx because it changed.
Compiling pandas/_libs/interval.pyx because it changed.
Compiling pandas/_libs/join.pyx because it changed.
Compiling pandas/_libs/lib.pyx because it changed.
Compiling pandas/_libs/missing.pyx because it changed.
Compiling pandas/_libs/parsers.pyx because it changed.
Compiling pandas/_libs/reduction.pyx because it changed.
Compiling pandas/_libs/ops.pyx because it changed.
Compiling pandas/_libs/ops_dispatch.pyx because it changed.
Compiling pandas/_libs/properties.pyx because it changed.
Compiling pandas/_libs/reshape.pyx because it changed.
Compiling pandas/_libs/sparse.pyx because it changed.
Compiling pandas/_libs/tslib.pyx because it changed.
Compiling pandas/_libs/tslibs/c_timestamp.pyx because it changed.
Compiling pandas/_libs/tslibs/ccalendar.pyx because it changed.
Compiling pandas/_libs/tslibs/conversion.pyx because it changed.
Compiling pandas/_libs/tslibs/fields.pyx because it changed.
Compiling pandas/_libs/tslibs/frequencies.pyx because it changed.
Compiling pandas/_libs/tslibs/nattype.pyx because it changed.
Compiling pandas/_libs/tslibs/np_datetime.pyx because it changed.
Compiling pandas/_libs/tslibs/offsets.pyx because it changed.
Compiling pandas/_libs/tslibs/parsing.pyx because it changed.
Compiling pandas/_libs/tslibs/period.pyx because it changed.
Compiling pandas/_libs/tslibs/resolution.pyx because it changed.
Compiling pandas/_libs/tslibs/strptime.pyx because it changed.
Compiling pandas/_libs/tslibs/timedeltas.pyx because it changed.
Compiling pandas/_libs/tslibs/timestamps.pyx because it changed.
Compiling pandas/_libs/tslibs/timezones.pyx because it changed.
Compiling pandas/_libs/tslibs/tzconversion.pyx because it changed.
Compiling pandas/_libs/testing.pyx because it changed.
Compiling pandas/_libs/window/aggregations.pyx because it changed.
Compiling pandas/_libs/window/indexers.pyx because it changed.
Compiling pandas/_libs/writers.pyx because it changed.
Compiling pandas/io/sas/sas.pyx because it changed.
[ 1/40] Cythonizing pandas/_libs/algos.pyx
[ 2/40] Cythonizing pandas/_libs/groupby.pyx
[ 3/40] Cythonizing pandas/_libs/hashing.pyx
[ 4/40] Cythonizing pandas/_libs/hashtable.pyx
[ 5/40] Cythonizing pandas/_libs/index.pyx
[ 6/40] Cythonizing pandas/_libs/indexing.pyx
[ 7/40] Cythonizing pandas/_libs/internals.pyx
[ 8/40] Cythonizing pandas/_libs/interval.pyx
Cleaning up...
Removing source in /tmp/pip-req-build-eh2hlgu3
Removed file:///home/tcaswell/source/other_source/pandas from build tracker '/tmp/pip-req-tracker-dfm6xgbp'
Removed build tracker: '/tmp/pip-req-tracker-dfm6xgbp'
ERROR: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.
Exception information:
Traceback (most recent call last):
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/cli/base_command.py", line 186, in _main
status = self.run(options, args)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/commands/install.py", line 331, in run
resolver.resolve(requirement_set)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 177, in resolve
discovered_reqs.extend(self._resolve_one(requirement_set, req))
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 333, in _resolve_one
abstract_dist = self._get_abstract_dist_for(req_to_install)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 282, in _get_abstract_dist_for
abstract_dist = self.preparer.prepare_linked_requirement(req)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/prepare.py", line 515, in prepare_linked_requirement
abstract_dist = _get_prepared_distribution(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/prepare.py", line 95, in _get_prepared_distribution
abstract_dist.prepare_distribution_metadata(finder, build_isolation)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/distributions/sdist.py", line 40, in prepare_distribution_metadata
self.req.prepare_metadata()
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/req/req_install.py", line 564, in prepare_metadata
self.metadata_directory = self._generate_metadata()
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/req/req_install.py", line 538, in _generate_metadata
return generate_metadata_legacy(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_legacy.py", line 115, in generate_metadata
call_subprocess(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/utils/subprocess.py", line 242, in call_subprocess
raise InstallationError(exc_msg)
pip._internal.exceptions.InstallationError: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.
|
Cython.Compiler.Errors.CompileError
|
def lookup(self, name):
# Look up name in this scope or an enclosing one.
# Return None if not found.
mangled_name = self.mangle_class_private_name(name)
entry = (
self.lookup_here(name) # lookup here also does mangling
or (self.outer_scope and self.outer_scope.lookup(mangled_name))
or None
)
if entry:
return entry
# look up the original name in the outer scope
# Not strictly Python behaviour but see https://github.com/cython/cython/issues/3544
entry = (self.outer_scope and self.outer_scope.lookup(name)) or None
if entry and entry.is_pyglobal:
self._emit_class_private_warning(entry.pos, name)
return entry
|
def lookup(self, name):
# Look up name in this scope or an enclosing one.
# Return None if not found.
name = self.mangle_class_private_name(name)
return (
self.lookup_here(name)
or (self.outer_scope and self.outer_scope.lookup(name))
or None
)
|
https://github.com/cython/cython/issues/3548
|
(bleeding) ✔ ~/source/other_source/pandas [master {pandas/master}|✚ 2]
jupiter@15:19 ➤ pip install -v .
Non-user install because user site-packages disabled
Created temporary directory: /tmp/pip-ephem-wheel-cache-zcmyaxf3
Created temporary directory: /tmp/pip-req-tracker-dfm6xgbp
Initialized build tracking at /tmp/pip-req-tracker-dfm6xgbp
Created build tracker: /tmp/pip-req-tracker-dfm6xgbp
Entered build tracker: /tmp/pip-req-tracker-dfm6xgbp
Created temporary directory: /tmp/pip-install-_tjupc3v
Processing /home/tcaswell/source/other_source/pandas
Created temporary directory: /tmp/pip-req-build-eh2hlgu3
Added file:///home/tcaswell/source/other_source/pandas to build tracker '/tmp/pip-req-tracker-dfm6xgbp'
Running setup.py (path:/tmp/pip-req-build-eh2hlgu3/setup.py) egg_info for package from file:///home/tcaswell/source/other_source/pandas
Running command python setup.py egg_info
warning: pandas/_libs/groupby.pyx:1101:26: Unreachable code
Error compiling Cython file:
------------------------------------------------------------
...
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):19:37: '___pyx_int64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
cdef bint ___pyx_uint64_t_is_signed
___pyx_uint64_t_is_signed = not (<___pyx_uint64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):21:38: '___pyx_uint64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
ctypedef struct __Pyx_memviewslice:
void *memview
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
^
------------------------------------------------------------
__pyxutil:15:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
ctypedef double ___pyx_float64_t "__pyx_t_5numpy_float64_t"
ctypedef unsigned long long ___pyx_uint64_t "__pyx_t_5numpy_uint64_t"
^
------------------------------------------------------------
__pyxutil:17:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
void *memview
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
ctypedef double ___pyx_float64_t "__pyx_t_5numpy_float64_t"
^
------------------------------------------------------------
__pyxutil:16:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):19:37: '___pyx_int64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
cdef bint ___pyx_uint64_t_is_signed
___pyx_uint64_t_is_signed = not (<___pyx_uint64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):21:38: '___pyx_uint64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 794, in <module>
setup_package()
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 764, in setup_package
ext_modules=maybe_cythonize(extensions, compiler_directives=directives),
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 537, in maybe_cythonize
return cythonize(extensions, *args, **kwargs)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/Cython/Build/Dependencies.py", line 1105, in cythonize
cythonize_one(*args)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/Cython/Build/Dependencies.py", line 1263, in cythonize_one
raise CompileError(None, pyx_file)
Cython.Compiler.Errors.CompileError: pandas/_libs/interval.pyx
Compiling pandas/_libs/algos.pyx because it changed.
Compiling pandas/_libs/groupby.pyx because it changed.
Compiling pandas/_libs/hashing.pyx because it changed.
Compiling pandas/_libs/hashtable.pyx because it changed.
Compiling pandas/_libs/index.pyx because it changed.
Compiling pandas/_libs/indexing.pyx because it changed.
Compiling pandas/_libs/internals.pyx because it changed.
Compiling pandas/_libs/interval.pyx because it changed.
Compiling pandas/_libs/join.pyx because it changed.
Compiling pandas/_libs/lib.pyx because it changed.
Compiling pandas/_libs/missing.pyx because it changed.
Compiling pandas/_libs/parsers.pyx because it changed.
Compiling pandas/_libs/reduction.pyx because it changed.
Compiling pandas/_libs/ops.pyx because it changed.
Compiling pandas/_libs/ops_dispatch.pyx because it changed.
Compiling pandas/_libs/properties.pyx because it changed.
Compiling pandas/_libs/reshape.pyx because it changed.
Compiling pandas/_libs/sparse.pyx because it changed.
Compiling pandas/_libs/tslib.pyx because it changed.
Compiling pandas/_libs/tslibs/c_timestamp.pyx because it changed.
Compiling pandas/_libs/tslibs/ccalendar.pyx because it changed.
Compiling pandas/_libs/tslibs/conversion.pyx because it changed.
Compiling pandas/_libs/tslibs/fields.pyx because it changed.
Compiling pandas/_libs/tslibs/frequencies.pyx because it changed.
Compiling pandas/_libs/tslibs/nattype.pyx because it changed.
Compiling pandas/_libs/tslibs/np_datetime.pyx because it changed.
Compiling pandas/_libs/tslibs/offsets.pyx because it changed.
Compiling pandas/_libs/tslibs/parsing.pyx because it changed.
Compiling pandas/_libs/tslibs/period.pyx because it changed.
Compiling pandas/_libs/tslibs/resolution.pyx because it changed.
Compiling pandas/_libs/tslibs/strptime.pyx because it changed.
Compiling pandas/_libs/tslibs/timedeltas.pyx because it changed.
Compiling pandas/_libs/tslibs/timestamps.pyx because it changed.
Compiling pandas/_libs/tslibs/timezones.pyx because it changed.
Compiling pandas/_libs/tslibs/tzconversion.pyx because it changed.
Compiling pandas/_libs/testing.pyx because it changed.
Compiling pandas/_libs/window/aggregations.pyx because it changed.
Compiling pandas/_libs/window/indexers.pyx because it changed.
Compiling pandas/_libs/writers.pyx because it changed.
Compiling pandas/io/sas/sas.pyx because it changed.
[ 1/40] Cythonizing pandas/_libs/algos.pyx
[ 2/40] Cythonizing pandas/_libs/groupby.pyx
[ 3/40] Cythonizing pandas/_libs/hashing.pyx
[ 4/40] Cythonizing pandas/_libs/hashtable.pyx
[ 5/40] Cythonizing pandas/_libs/index.pyx
[ 6/40] Cythonizing pandas/_libs/indexing.pyx
[ 7/40] Cythonizing pandas/_libs/internals.pyx
[ 8/40] Cythonizing pandas/_libs/interval.pyx
Cleaning up...
Removing source in /tmp/pip-req-build-eh2hlgu3
Removed file:///home/tcaswell/source/other_source/pandas from build tracker '/tmp/pip-req-tracker-dfm6xgbp'
Removed build tracker: '/tmp/pip-req-tracker-dfm6xgbp'
ERROR: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.
Exception information:
Traceback (most recent call last):
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/cli/base_command.py", line 186, in _main
status = self.run(options, args)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/commands/install.py", line 331, in run
resolver.resolve(requirement_set)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 177, in resolve
discovered_reqs.extend(self._resolve_one(requirement_set, req))
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 333, in _resolve_one
abstract_dist = self._get_abstract_dist_for(req_to_install)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 282, in _get_abstract_dist_for
abstract_dist = self.preparer.prepare_linked_requirement(req)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/prepare.py", line 515, in prepare_linked_requirement
abstract_dist = _get_prepared_distribution(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/prepare.py", line 95, in _get_prepared_distribution
abstract_dist.prepare_distribution_metadata(finder, build_isolation)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/distributions/sdist.py", line 40, in prepare_distribution_metadata
self.req.prepare_metadata()
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/req/req_install.py", line 564, in prepare_metadata
self.metadata_directory = self._generate_metadata()
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/req/req_install.py", line 538, in _generate_metadata
return generate_metadata_legacy(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_legacy.py", line 115, in generate_metadata
call_subprocess(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/utils/subprocess.py", line 242, in call_subprocess
raise InstallationError(exc_msg)
pip._internal.exceptions.InstallationError: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.
|
Cython.Compiler.Errors.CompileError
|
def lookup_here(self, name):
# Look up in this scope only, return None if not found.
entry = self.entries.get(self.mangle_class_private_name(name), None)
if entry:
return entry
# Also check the unmangled name in the current scope
# (even if mangling should give us something else).
# This is to support things like global __foo which makes a declaration for __foo
return self.entries.get(name, None)
|
def lookup_here(self, name):
# Look up in this scope only, return None if not found.
name = self.mangle_class_private_name(name)
return self.entries.get(name, None)
|
https://github.com/cython/cython/issues/3548
|
(bleeding) ✔ ~/source/other_source/pandas [master {pandas/master}|✚ 2]
jupiter@15:19 ➤ pip install -v .
Non-user install because user site-packages disabled
Created temporary directory: /tmp/pip-ephem-wheel-cache-zcmyaxf3
Created temporary directory: /tmp/pip-req-tracker-dfm6xgbp
Initialized build tracking at /tmp/pip-req-tracker-dfm6xgbp
Created build tracker: /tmp/pip-req-tracker-dfm6xgbp
Entered build tracker: /tmp/pip-req-tracker-dfm6xgbp
Created temporary directory: /tmp/pip-install-_tjupc3v
Processing /home/tcaswell/source/other_source/pandas
Created temporary directory: /tmp/pip-req-build-eh2hlgu3
Added file:///home/tcaswell/source/other_source/pandas to build tracker '/tmp/pip-req-tracker-dfm6xgbp'
Running setup.py (path:/tmp/pip-req-build-eh2hlgu3/setup.py) egg_info for package from file:///home/tcaswell/source/other_source/pandas
Running command python setup.py egg_info
warning: pandas/_libs/groupby.pyx:1101:26: Unreachable code
Error compiling Cython file:
------------------------------------------------------------
...
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):19:37: '___pyx_int64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
cdef bint ___pyx_uint64_t_is_signed
___pyx_uint64_t_is_signed = not (<___pyx_uint64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):21:38: '___pyx_uint64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
ctypedef struct __Pyx_memviewslice:
void *memview
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
^
------------------------------------------------------------
__pyxutil:15:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
ctypedef double ___pyx_float64_t "__pyx_t_5numpy_float64_t"
ctypedef unsigned long long ___pyx_uint64_t "__pyx_t_5numpy_uint64_t"
^
------------------------------------------------------------
__pyxutil:17:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
void *memview
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
ctypedef double ___pyx_float64_t "__pyx_t_5numpy_float64_t"
^
------------------------------------------------------------
__pyxutil:16:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):19:37: '___pyx_int64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
cdef bint ___pyx_uint64_t_is_signed
___pyx_uint64_t_is_signed = not (<___pyx_uint64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):21:38: '___pyx_uint64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 794, in <module>
setup_package()
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 764, in setup_package
ext_modules=maybe_cythonize(extensions, compiler_directives=directives),
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 537, in maybe_cythonize
return cythonize(extensions, *args, **kwargs)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/Cython/Build/Dependencies.py", line 1105, in cythonize
cythonize_one(*args)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/Cython/Build/Dependencies.py", line 1263, in cythonize_one
raise CompileError(None, pyx_file)
Cython.Compiler.Errors.CompileError: pandas/_libs/interval.pyx
Compiling pandas/_libs/algos.pyx because it changed.
Compiling pandas/_libs/groupby.pyx because it changed.
Compiling pandas/_libs/hashing.pyx because it changed.
Compiling pandas/_libs/hashtable.pyx because it changed.
Compiling pandas/_libs/index.pyx because it changed.
Compiling pandas/_libs/indexing.pyx because it changed.
Compiling pandas/_libs/internals.pyx because it changed.
Compiling pandas/_libs/interval.pyx because it changed.
Compiling pandas/_libs/join.pyx because it changed.
Compiling pandas/_libs/lib.pyx because it changed.
Compiling pandas/_libs/missing.pyx because it changed.
Compiling pandas/_libs/parsers.pyx because it changed.
Compiling pandas/_libs/reduction.pyx because it changed.
Compiling pandas/_libs/ops.pyx because it changed.
Compiling pandas/_libs/ops_dispatch.pyx because it changed.
Compiling pandas/_libs/properties.pyx because it changed.
Compiling pandas/_libs/reshape.pyx because it changed.
Compiling pandas/_libs/sparse.pyx because it changed.
Compiling pandas/_libs/tslib.pyx because it changed.
Compiling pandas/_libs/tslibs/c_timestamp.pyx because it changed.
Compiling pandas/_libs/tslibs/ccalendar.pyx because it changed.
Compiling pandas/_libs/tslibs/conversion.pyx because it changed.
Compiling pandas/_libs/tslibs/fields.pyx because it changed.
Compiling pandas/_libs/tslibs/frequencies.pyx because it changed.
Compiling pandas/_libs/tslibs/nattype.pyx because it changed.
Compiling pandas/_libs/tslibs/np_datetime.pyx because it changed.
Compiling pandas/_libs/tslibs/offsets.pyx because it changed.
Compiling pandas/_libs/tslibs/parsing.pyx because it changed.
Compiling pandas/_libs/tslibs/period.pyx because it changed.
Compiling pandas/_libs/tslibs/resolution.pyx because it changed.
Compiling pandas/_libs/tslibs/strptime.pyx because it changed.
Compiling pandas/_libs/tslibs/timedeltas.pyx because it changed.
Compiling pandas/_libs/tslibs/timestamps.pyx because it changed.
Compiling pandas/_libs/tslibs/timezones.pyx because it changed.
Compiling pandas/_libs/tslibs/tzconversion.pyx because it changed.
Compiling pandas/_libs/testing.pyx because it changed.
Compiling pandas/_libs/window/aggregations.pyx because it changed.
Compiling pandas/_libs/window/indexers.pyx because it changed.
Compiling pandas/_libs/writers.pyx because it changed.
Compiling pandas/io/sas/sas.pyx because it changed.
[ 1/40] Cythonizing pandas/_libs/algos.pyx
[ 2/40] Cythonizing pandas/_libs/groupby.pyx
[ 3/40] Cythonizing pandas/_libs/hashing.pyx
[ 4/40] Cythonizing pandas/_libs/hashtable.pyx
[ 5/40] Cythonizing pandas/_libs/index.pyx
[ 6/40] Cythonizing pandas/_libs/indexing.pyx
[ 7/40] Cythonizing pandas/_libs/internals.pyx
[ 8/40] Cythonizing pandas/_libs/interval.pyx
Cleaning up...
Removing source in /tmp/pip-req-build-eh2hlgu3
Removed file:///home/tcaswell/source/other_source/pandas from build tracker '/tmp/pip-req-tracker-dfm6xgbp'
Removed build tracker: '/tmp/pip-req-tracker-dfm6xgbp'
ERROR: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.
Exception information:
Traceback (most recent call last):
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/cli/base_command.py", line 186, in _main
status = self.run(options, args)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/commands/install.py", line 331, in run
resolver.resolve(requirement_set)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 177, in resolve
discovered_reqs.extend(self._resolve_one(requirement_set, req))
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 333, in _resolve_one
abstract_dist = self._get_abstract_dist_for(req_to_install)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 282, in _get_abstract_dist_for
abstract_dist = self.preparer.prepare_linked_requirement(req)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/prepare.py", line 515, in prepare_linked_requirement
abstract_dist = _get_prepared_distribution(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/prepare.py", line 95, in _get_prepared_distribution
abstract_dist.prepare_distribution_metadata(finder, build_isolation)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/distributions/sdist.py", line 40, in prepare_distribution_metadata
self.req.prepare_metadata()
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/req/req_install.py", line 564, in prepare_metadata
self.metadata_directory = self._generate_metadata()
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/req/req_install.py", line 538, in _generate_metadata
return generate_metadata_legacy(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_legacy.py", line 115, in generate_metadata
call_subprocess(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/utils/subprocess.py", line 242, in call_subprocess
raise InstallationError(exc_msg)
pip._internal.exceptions.InstallationError: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.
|
Cython.Compiler.Errors.CompileError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.