after_merge
stringlengths 64
17k
| before_merge
stringlengths 60
17k
| source code and errors
stringlengths 236
32.3k
| full_traceback
stringlengths 170
17.7k
| traceback_type
stringclasses 60
values |
|---|---|---|---|---|
def cmdb_get_mainline_object_topo(request, bk_biz_id, bk_supplier_account=''):
"""
@summary: 获取配置平台业务拓扑模型
@param request:
@param bk_biz_id:
@param bk_supplier_account:
@return:
"""
kwargs = {
'bk_biz_id': bk_biz_id,
'bk_supplier_account': bk_supplier_account,
}
client = get_client_by_user(request.user.username)
cc_result = client.cc.get_mainline_object_topo(kwargs)
if not cc_result['result']:
message = handle_api_error(_(u"配置平台(CMDB)"),
'cc.get_mainline_object_topo',
kwargs,
cc_result['message'])
return {'result': cc_result['result'], 'code': cc_result['code'], 'message': message}
data = cc_result['data']
for bk_obj in data:
if bk_obj['bk_obj_id'] == 'host':
bk_obj['bk_obj_name'] = 'IP'
result = {'result': cc_result['result'], 'code': cc_result['code'], 'data': cc_result['data']}
return JsonResponse(result)
|
def cmdb_get_mainline_object_topo(request, bk_biz_id, bk_supplier_account=''):
"""
@summary: 获取配置平台业务拓扑模型
@param request:
@param bk_biz_id:
@param bk_supplier_account:
@return:
"""
kwargs = {
'bk_biz_id': bk_biz_id,
'bk_supplier_account': bk_supplier_account,
}
client = get_client_by_request(request)
cc_result = client.cc.get_mainline_object_topo(kwargs)
if not cc_result['result']:
message = handle_api_error(_(u"配置平台(CMDB)"),
'cc.get_mainline_object_topo',
kwargs,
cc_result['message'])
return {'result': cc_result['result'], 'code': cc_result['code'], 'message': message}
data = cc_result['data']
for bk_obj in data:
if bk_obj['bk_obj_id'] == 'host':
bk_obj['bk_obj_name'] = 'IP'
result = {'result': cc_result['result'], 'code': cc_result['code'], 'data': cc_result['data']}
return JsonResponse(result)
|
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line\\nutility.execute()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute\\ndjango.setup()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup\\napps.populate(settings.INSTALLED_APPS)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate\\napp_config = AppConfig.create(entry)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create\\nmod = import_module(mod_path)\\nFile "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module\\n__import__(name)\\nFile "/data/app/code/pipeline/apps.py", line 18, in <module>\\nfrom rediscluster import StrictRedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>\\nfrom .client import StrictRedisCluster, RedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>\\nfrom .connection import (\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>\\nfrom .nodemanager import NodeManager\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>\\nfrom redis._compat import b, unicode, bytes, long, basestring\\nImportError: cannot import name b\\n------FAILURE: Migrate Database------'}]
|
------STARTING: Migrate Database------
Traceback (most recent call last):
File "manage.py", line 27, in <module>
execute_from_command_line(sys.argv)
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line
utility.execute()
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute
django.setup()
File "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup
apps.populate(settings.INSTALLED_APPS)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate
app_config = AppConfig.create(entry)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create
mod = import_module(mod_path)
File "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
File "/data/app/code/pipeline/apps.py", line 18, in <module>
from rediscluster import StrictRedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>
from .client import StrictRedisCluster, RedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>
from .connection import (
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>
from .nodemanager import NodeManager
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>
from redis._compat import b, unicode, bytes, long, basestring
ImportError: cannot import name b
------FAILURE: Migrate Database------
|
ImportError
|
def cc_search_object_attribute(request, obj_id, biz_cc_id, supplier_account):
"""
@summary: 获取对象自定义属性
@param request:
@param biz_cc_id:
@return:
"""
client = get_client_by_user(request.user.username)
kwargs = {
'bk_obj_id': obj_id,
'bk_supplier_account': supplier_account
}
cc_result = client.cc.search_object_attribute(kwargs)
if not cc_result['result']:
message = handle_api_error('cc', 'cc.search_object_attribute', kwargs, cc_result['message'])
logger.error(message)
result = {
'result': False,
'data': [],
'message': message
}
return JsonResponse(result)
obj_property = []
for item in cc_result['data']:
if item['editable']:
obj_property.append({
'value': item['bk_property_id'],
'text': item['bk_property_name']
})
return JsonResponse({'result': True, 'data': obj_property})
|
def cc_search_object_attribute(request, obj_id, biz_cc_id, supplier_account):
"""
@summary: 获取对象自定义属性
@param request:
@param biz_cc_id:
@return:
"""
client = get_client_by_request(request)
kwargs = {
'bk_obj_id': obj_id,
'bk_supplier_account': supplier_account
}
cc_result = client.cc.search_object_attribute(kwargs)
if not cc_result['result']:
message = handle_api_error('cc', 'cc.search_object_attribute', kwargs, cc_result['message'])
logger.error(message)
result = {
'result': False,
'data': [],
'message': message
}
return JsonResponse(result)
obj_property = []
for item in cc_result['data']:
if item['editable']:
obj_property.append({
'value': item['bk_property_id'],
'text': item['bk_property_name']
})
return JsonResponse({'result': True, 'data': obj_property})
|
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line\\nutility.execute()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute\\ndjango.setup()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup\\napps.populate(settings.INSTALLED_APPS)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate\\napp_config = AppConfig.create(entry)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create\\nmod = import_module(mod_path)\\nFile "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module\\n__import__(name)\\nFile "/data/app/code/pipeline/apps.py", line 18, in <module>\\nfrom rediscluster import StrictRedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>\\nfrom .client import StrictRedisCluster, RedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>\\nfrom .connection import (\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>\\nfrom .nodemanager import NodeManager\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>\\nfrom redis._compat import b, unicode, bytes, long, basestring\\nImportError: cannot import name b\\n------FAILURE: Migrate Database------'}]
|
------STARTING: Migrate Database------
Traceback (most recent call last):
File "manage.py", line 27, in <module>
execute_from_command_line(sys.argv)
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line
utility.execute()
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute
django.setup()
File "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup
apps.populate(settings.INSTALLED_APPS)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate
app_config = AppConfig.create(entry)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create
mod = import_module(mod_path)
File "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
File "/data/app/code/pipeline/apps.py", line 18, in <module>
from rediscluster import StrictRedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>
from .client import StrictRedisCluster, RedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>
from .connection import (
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>
from .nodemanager import NodeManager
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>
from redis._compat import b, unicode, bytes, long, basestring
ImportError: cannot import name b
------FAILURE: Migrate Database------
|
ImportError
|
def cc_search_create_object_attribute(request, obj_id, biz_cc_id, supplier_account):
client = get_client_by_user(request.user.username)
kwargs = {
'bk_obj_id': obj_id,
'bk_supplier_account': supplier_account
}
cc_result = client.cc.search_object_attribute(kwargs)
if not cc_result['result']:
message = handle_api_error('cc', 'cc.search_object_attribute', kwargs, cc_result['message'])
logger.error(message)
result = {
'result': False,
'data': [],
'message': message
}
return JsonResponse(result)
obj_property = []
for item in cc_result['data']:
if item['editable']:
prop_dict = {
'tag_code': item['bk_property_id'],
'type': "input",
'attrs': {
'name': item['bk_property_name'],
'editable': 'true',
},
}
if item['bk_property_id'] in ['bk_set_name']:
prop_dict["attrs"]["validation"] = [
{
"type": "required"
}
]
obj_property.append(prop_dict)
return JsonResponse({'result': True, 'data': obj_property})
|
def cc_search_create_object_attribute(request, obj_id, biz_cc_id, supplier_account):
client = get_client_by_request(request)
kwargs = {
'bk_obj_id': obj_id,
'bk_supplier_account': supplier_account
}
cc_result = client.cc.search_object_attribute(kwargs)
if not cc_result['result']:
message = handle_api_error('cc', 'cc.search_object_attribute', kwargs, cc_result['message'])
logger.error(message)
result = {
'result': False,
'data': [],
'message': message
}
return JsonResponse(result)
obj_property = []
for item in cc_result['data']:
if item['editable']:
prop_dict = {
'tag_code': item['bk_property_id'],
'type': "input",
'attrs': {
'name': item['bk_property_name'],
'editable': 'true',
},
}
if item['bk_property_id'] in ['bk_set_name']:
prop_dict["attrs"]["validation"] = [
{
"type": "required"
}
]
obj_property.append(prop_dict)
return JsonResponse({'result': True, 'data': obj_property})
|
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line\\nutility.execute()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute\\ndjango.setup()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup\\napps.populate(settings.INSTALLED_APPS)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate\\napp_config = AppConfig.create(entry)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create\\nmod = import_module(mod_path)\\nFile "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module\\n__import__(name)\\nFile "/data/app/code/pipeline/apps.py", line 18, in <module>\\nfrom rediscluster import StrictRedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>\\nfrom .client import StrictRedisCluster, RedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>\\nfrom .connection import (\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>\\nfrom .nodemanager import NodeManager\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>\\nfrom redis._compat import b, unicode, bytes, long, basestring\\nImportError: cannot import name b\\n------FAILURE: Migrate Database------'}]
|
------STARTING: Migrate Database------
Traceback (most recent call last):
File "manage.py", line 27, in <module>
execute_from_command_line(sys.argv)
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line
utility.execute()
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute
django.setup()
File "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup
apps.populate(settings.INSTALLED_APPS)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate
app_config = AppConfig.create(entry)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create
mod = import_module(mod_path)
File "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
File "/data/app/code/pipeline/apps.py", line 18, in <module>
from rediscluster import StrictRedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>
from .client import StrictRedisCluster, RedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>
from .connection import (
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>
from .nodemanager import NodeManager
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>
from redis._compat import b, unicode, bytes, long, basestring
ImportError: cannot import name b
------FAILURE: Migrate Database------
|
ImportError
|
def cc_search_topo(request, obj_id, category, biz_cc_id, supplier_account):
"""
@summary: 查询对象拓扑
@param request:
@param biz_cc_id:
@return:
"""
client = get_client_by_user(request.user.username)
kwargs = {
'bk_biz_id': biz_cc_id,
'bk_supplier_account': supplier_account
}
cc_result = client.cc.search_biz_inst_topo(kwargs)
if not cc_result['result']:
message = handle_api_error('cc', 'cc.search_biz_inst_topo', kwargs, cc_result['message'])
logger.error(message)
result = {
'result': False,
'data': [],
'message': message
}
return JsonResponse(result)
if category in ["normal", "prev", "picker"]:
cc_topo = cc_format_topo_data(cc_result['data'], obj_id, category)
else:
cc_topo = []
return JsonResponse({'result': True, 'data': cc_topo})
|
def cc_search_topo(request, obj_id, category, biz_cc_id, supplier_account):
"""
@summary: 查询对象拓扑
@param request:
@param biz_cc_id:
@return:
"""
client = get_client_by_request(request)
kwargs = {
'bk_biz_id': biz_cc_id,
'bk_supplier_account': supplier_account
}
cc_result = client.cc.search_biz_inst_topo(kwargs)
if not cc_result['result']:
message = handle_api_error('cc', 'cc.search_biz_inst_topo', kwargs, cc_result['message'])
logger.error(message)
result = {
'result': False,
'data': [],
'message': message
}
return JsonResponse(result)
if category in ["normal", "prev", "picker"]:
cc_topo = cc_format_topo_data(cc_result['data'], obj_id, category)
else:
cc_topo = []
return JsonResponse({'result': True, 'data': cc_topo})
|
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line\\nutility.execute()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute\\ndjango.setup()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup\\napps.populate(settings.INSTALLED_APPS)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate\\napp_config = AppConfig.create(entry)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create\\nmod = import_module(mod_path)\\nFile "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module\\n__import__(name)\\nFile "/data/app/code/pipeline/apps.py", line 18, in <module>\\nfrom rediscluster import StrictRedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>\\nfrom .client import StrictRedisCluster, RedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>\\nfrom .connection import (\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>\\nfrom .nodemanager import NodeManager\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>\\nfrom redis._compat import b, unicode, bytes, long, basestring\\nImportError: cannot import name b\\n------FAILURE: Migrate Database------'}]
|
------STARTING: Migrate Database------
Traceback (most recent call last):
File "manage.py", line 27, in <module>
execute_from_command_line(sys.argv)
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line
utility.execute()
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute
django.setup()
File "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup
apps.populate(settings.INSTALLED_APPS)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate
app_config = AppConfig.create(entry)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create
mod = import_module(mod_path)
File "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
File "/data/app/code/pipeline/apps.py", line 18, in <module>
from rediscluster import StrictRedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>
from .client import StrictRedisCluster, RedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>
from .connection import (
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>
from .nodemanager import NodeManager
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>
from redis._compat import b, unicode, bytes, long, basestring
ImportError: cannot import name b
------FAILURE: Migrate Database------
|
ImportError
|
def job_get_script_list(request, biz_cc_id):
"""
查询业务脚本列表
:param request:
:param biz_cc_id:
:return:
"""
# 查询脚本列表
client = get_client_by_user(request.user.username)
script_type = request.GET.get('type')
kwargs = {
'bk_biz_id': biz_cc_id,
'is_public': True if script_type == 'public' else False
}
script_result = client.job.get_script_list(kwargs)
if not script_result['result']:
message = handle_api_error('cc', 'job.get_script_list', kwargs, script_result['message'])
logger.error(message)
result = {
'result': False,
'message': message
}
return JsonResponse(result)
script_dict = {}
for script in script_result['data']['data']:
script_dict.setdefault(script['name'], []).append(script['id'])
version_data = []
for name, version in script_dict.items():
version_data.append({
"text": name,
"value": max(version)
})
return JsonResponse({'result': True, 'data': version_data})
|
def job_get_script_list(request, biz_cc_id):
"""
查询业务脚本列表
:param request:
:param biz_cc_id:
:return:
"""
# 查询脚本列表
client = get_client_by_request(request)
script_type = request.GET.get('type')
kwargs = {
'bk_biz_id': biz_cc_id,
'is_public': True if script_type == 'public' else False
}
script_result = client.job.get_script_list(kwargs)
if not script_result['result']:
message = handle_api_error('cc', 'job.get_script_list', kwargs, script_result['message'])
logger.error(message)
result = {
'result': False,
'message': message
}
return JsonResponse(result)
script_dict = {}
for script in script_result['data']['data']:
script_dict.setdefault(script['name'], []).append(script['id'])
version_data = []
for name, version in script_dict.items():
version_data.append({
"text": name,
"value": max(version)
})
return JsonResponse({'result': True, 'data': version_data})
|
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line\\nutility.execute()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute\\ndjango.setup()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup\\napps.populate(settings.INSTALLED_APPS)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate\\napp_config = AppConfig.create(entry)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create\\nmod = import_module(mod_path)\\nFile "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module\\n__import__(name)\\nFile "/data/app/code/pipeline/apps.py", line 18, in <module>\\nfrom rediscluster import StrictRedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>\\nfrom .client import StrictRedisCluster, RedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>\\nfrom .connection import (\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>\\nfrom .nodemanager import NodeManager\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>\\nfrom redis._compat import b, unicode, bytes, long, basestring\\nImportError: cannot import name b\\n------FAILURE: Migrate Database------'}]
|
------STARTING: Migrate Database------
Traceback (most recent call last):
File "manage.py", line 27, in <module>
execute_from_command_line(sys.argv)
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line
utility.execute()
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute
django.setup()
File "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup
apps.populate(settings.INSTALLED_APPS)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate
app_config = AppConfig.create(entry)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create
mod = import_module(mod_path)
File "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
File "/data/app/code/pipeline/apps.py", line 18, in <module>
from rediscluster import StrictRedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>
from .client import StrictRedisCluster, RedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>
from .connection import (
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>
from .nodemanager import NodeManager
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>
from redis._compat import b, unicode, bytes, long, basestring
ImportError: cannot import name b
------FAILURE: Migrate Database------
|
ImportError
|
def job_get_job_tasks_by_biz(request, biz_cc_id):
client = get_client_by_user(request.user.username)
job_result = client.job.get_job_list({'bk_biz_id': biz_cc_id})
if not job_result['result']:
message = _(u"查询作业平台(JOB)的作业模板[app_id=%s]接口job.get_task返回失败: %s") % (
biz_cc_id, job_result['message'])
logger.error(message)
result = {
'result': False,
'data': [],
'message': message
}
return JsonResponse(result)
task_list = []
for task in job_result['data']:
task_list.append({
'value': task['bk_job_id'],
'text': task['name'],
})
return JsonResponse({'result': True, 'data': task_list})
|
def job_get_job_tasks_by_biz(request, biz_cc_id):
client = get_client_by_request(request)
job_result = client.job.get_job_list({'bk_biz_id': biz_cc_id})
if not job_result['result']:
message = _(u"查询作业平台(JOB)的作业模板[app_id=%s]接口job.get_task返回失败: %s") % (
biz_cc_id, job_result['message'])
logger.error(message)
result = {
'result': False,
'data': [],
'message': message
}
return JsonResponse(result)
task_list = []
for task in job_result['data']:
task_list.append({
'value': task['bk_job_id'],
'text': task['name'],
})
return JsonResponse({'result': True, 'data': task_list})
|
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line\\nutility.execute()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute\\ndjango.setup()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup\\napps.populate(settings.INSTALLED_APPS)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate\\napp_config = AppConfig.create(entry)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create\\nmod = import_module(mod_path)\\nFile "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module\\n__import__(name)\\nFile "/data/app/code/pipeline/apps.py", line 18, in <module>\\nfrom rediscluster import StrictRedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>\\nfrom .client import StrictRedisCluster, RedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>\\nfrom .connection import (\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>\\nfrom .nodemanager import NodeManager\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>\\nfrom redis._compat import b, unicode, bytes, long, basestring\\nImportError: cannot import name b\\n------FAILURE: Migrate Database------'}]
|
------STARTING: Migrate Database------
Traceback (most recent call last):
File "manage.py", line 27, in <module>
execute_from_command_line(sys.argv)
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line
utility.execute()
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute
django.setup()
File "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup
apps.populate(settings.INSTALLED_APPS)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate
app_config = AppConfig.create(entry)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create
mod = import_module(mod_path)
File "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
File "/data/app/code/pipeline/apps.py", line 18, in <module>
from rediscluster import StrictRedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>
from .client import StrictRedisCluster, RedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>
from .connection import (
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>
from .nodemanager import NodeManager
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>
from redis._compat import b, unicode, bytes, long, basestring
ImportError: cannot import name b
------FAILURE: Migrate Database------
|
ImportError
|
def job_get_job_task_detail(request, biz_cc_id, task_id):
client = get_client_by_user(request.user.username)
job_result = client.job.get_job_detail({'bk_biz_id': biz_cc_id,
'bk_job_id': task_id})
if not job_result['result']:
message = _(u"查询作业平台(JOB)的作业模板详情[app_id=%s]接口job.get_task_detail返回失败: %s") % (
biz_cc_id, job_result['message'])
logger.error(message)
result = {
'result': False,
'data': [],
'message': message
}
return JsonResponse(result)
job_step_type_name = {
1: _(u"脚本"),
2: _(u"文件"),
4: u"SQL"
}
task_detail = job_result['data']
global_var = []
steps = []
for var in task_detail.get('global_vars', []):
# 1-字符串, 2-IP, 3-索引数组, 4-关联数组
if var['type'] in [JOB_VAR_TYPE_STR, JOB_VAR_TYPE_IP, JOB_VAR_TYPE_ARRAY]:
value = var.get('value', '')
else:
value = ['{plat_id}:{ip}'.format(plat_id=ip_item['bk_cloud_id'], ip=ip_item['ip'])
for ip_item in var.get('ip_list', [])]
global_var.append({
'id': var['id'],
# 全局变量类型:1:云参, 2:上下文参数,3:IP
'category': var.get('category', 1),
'name': var['name'],
'type': var['type'],
'value': value,
'description': var['description']
})
for info in task_detail.get('steps', []):
# 1-执行脚本, 2-传文件, 4-传SQL
steps.append({
'stepId': info['step_id'],
'name': info['name'],
'scriptParams': info.get('script_param', ''),
'account': info.get('account', ''),
'ipList': '',
'type': info['type'],
'type_name': job_step_type_name.get(info['type'], info['type'])
})
return JsonResponse({'result': True, 'data': {'global_var': global_var, 'steps': steps}})
|
def job_get_job_task_detail(request, biz_cc_id, task_id):
client = get_client_by_request(request)
job_result = client.job.get_job_detail({'bk_biz_id': biz_cc_id,
'bk_job_id': task_id})
if not job_result['result']:
message = _(u"查询作业平台(JOB)的作业模板详情[app_id=%s]接口job.get_task_detail返回失败: %s") % (
biz_cc_id, job_result['message'])
logger.error(message)
result = {
'result': False,
'data': [],
'message': message
}
return JsonResponse(result)
job_step_type_name = {
1: _(u"脚本"),
2: _(u"文件"),
4: u"SQL"
}
task_detail = job_result['data']
global_var = []
steps = []
for var in task_detail.get('global_vars', []):
# 1-字符串, 2-IP, 3-索引数组, 4-关联数组
if var['type'] in [JOB_VAR_TYPE_STR, JOB_VAR_TYPE_IP, JOB_VAR_TYPE_ARRAY]:
value = var.get('value', '')
else:
value = ['{plat_id}:{ip}'.format(plat_id=ip_item['bk_cloud_id'], ip=ip_item['ip'])
for ip_item in var.get('ip_list', [])]
global_var.append({
'id': var['id'],
# 全局变量类型:1:云参, 2:上下文参数,3:IP
'category': var.get('category', 1),
'name': var['name'],
'type': var['type'],
'value': value,
'description': var['description']
})
for info in task_detail.get('steps', []):
# 1-执行脚本, 2-传文件, 4-传SQL
steps.append({
'stepId': info['step_id'],
'name': info['name'],
'scriptParams': info.get('script_param', ''),
'account': info.get('account', ''),
'ipList': '',
'type': info['type'],
'type_name': job_step_type_name.get(info['type'], info['type'])
})
return JsonResponse({'result': True, 'data': {'global_var': global_var, 'steps': steps}})
|
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line\\nutility.execute()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute\\ndjango.setup()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup\\napps.populate(settings.INSTALLED_APPS)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate\\napp_config = AppConfig.create(entry)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create\\nmod = import_module(mod_path)\\nFile "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module\\n__import__(name)\\nFile "/data/app/code/pipeline/apps.py", line 18, in <module>\\nfrom rediscluster import StrictRedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>\\nfrom .client import StrictRedisCluster, RedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>\\nfrom .connection import (\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>\\nfrom .nodemanager import NodeManager\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>\\nfrom redis._compat import b, unicode, bytes, long, basestring\\nImportError: cannot import name b\\n------FAILURE: Migrate Database------'}]
|
------STARTING: Migrate Database------
Traceback (most recent call last):
File "manage.py", line 27, in <module>
execute_from_command_line(sys.argv)
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line
utility.execute()
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute
django.setup()
File "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup
apps.populate(settings.INSTALLED_APPS)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate
app_config = AppConfig.create(entry)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create
mod = import_module(mod_path)
File "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
File "/data/app/code/pipeline/apps.py", line 18, in <module>
from rediscluster import StrictRedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>
from .client import StrictRedisCluster, RedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>
from .connection import (
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>
from .nodemanager import NodeManager
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>
from redis._compat import b, unicode, bytes, long, basestring
ImportError: cannot import name b
------FAILURE: Migrate Database------
|
ImportError
|
def get_bk_user(request):
bkuser = None
if request.weixin_user and not isinstance(request.weixin_user, AnonymousUser):
user_model = get_user_model()
try:
user_property = UserProperty.objects.get(key='wx_userid', value=request.weixin_user.userid)
except UserProperty.DoesNotExist:
logger.warning('user[wx_userid=%s] not in UserProperty' % request.weixin_user.userid)
else:
bkuser = user_model.objects.get(username=user_property.user.username)
return bkuser or AnonymousUser()
|
def get_bk_user(request):
bkuser = None
if request.weixin_user and not isinstance(request.weixin_user, AnonymousUser):
try:
user_property = UserProperty.objects.get(key='wx_userid', value=request.weixin_user.userid)
bkuser = user_property.user
except UserProperty.DoesNotExist:
bkuser = None
return bkuser or AnonymousUser()
|
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line\\nutility.execute()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute\\ndjango.setup()\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup\\napps.populate(settings.INSTALLED_APPS)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate\\napp_config = AppConfig.create(entry)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create\\nmod = import_module(mod_path)\\nFile "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module\\n__import__(name)\\nFile "/data/app/code/pipeline/apps.py", line 18, in <module>\\nfrom rediscluster import StrictRedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>\\nfrom .client import StrictRedisCluster, RedisCluster\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>\\nfrom .connection import (\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>\\nfrom .nodemanager import NodeManager\\nFile "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>\\nfrom redis._compat import b, unicode, bytes, long, basestring\\nImportError: cannot import name b\\n------FAILURE: Migrate Database------'}]
|
------STARTING: Migrate Database------
Traceback (most recent call last):
File "manage.py", line 27, in <module>
execute_from_command_line(sys.argv)
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line
utility.execute()
File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 328, in execute
django.setup()
File "/cache/.bk/env/lib/python2.7/site-packages/django/__init__.py", line 18, in setup
apps.populate(settings.INSTALLED_APPS)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/registry.py", line 85, in populate
app_config = AppConfig.create(entry)
File "/cache/.bk/env/lib/python2.7/site-packages/django/apps/config.py", line 112, in create
mod = import_module(mod_path)
File "/cache/.bk/env/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
File "/data/app/code/pipeline/apps.py", line 18, in <module>
from rediscluster import StrictRedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/__init__.py", line 7, in <module>
from .client import StrictRedisCluster, RedisCluster
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/client.py", line 10, in <module>
from .connection import (
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/connection.py", line 11, in <module>
from .nodemanager import NodeManager
File "/cache/.bk/env/lib/python2.7/site-packages/rediscluster/nodemanager.py", line 12, in <module>
from redis._compat import b, unicode, bytes, long, basestring
ImportError: cannot import name b
------FAILURE: Migrate Database------
|
ImportError
|
def fit(self, dataset: Dataset):
"""Calculates statistics for this workflow on the input dataset
Parameters
-----------
dataset: Dataset
The input dataset to calculate statistics for. If there is a train/test split this
data should be the training dataset only.
"""
self._clear_worker_cache()
ddf = dataset.to_ddf(columns=self._input_columns())
# Get a dictionary mapping all StatOperators we need to fit to a set of any dependant
# StatOperators (having StatOperators that depend on the output of other StatOperators
# means that will have multiple phases in the fit cycle here)
stat_ops = {op: _get_stat_ops(op.parents) for op in _get_stat_ops([self.column_group])}
while stat_ops:
# get all the StatOperators that we can currently call fit on (no outstanding
# dependencies)
current_phase = [op for op, dependencies in stat_ops.items() if not dependencies]
if not current_phase:
# this shouldn't happen, but lets not infinite loop just in case
raise RuntimeError("failed to find dependency-free StatOperator to fit")
stats, ops = [], []
for column_group in current_phase:
# apply transforms necessary for the inputs to the current column group, ignoring
# the transforms from the statop itself
transformed_ddf = _transform_ddf(ddf, column_group.parents)
op = column_group.op
try:
stats.append(op.fit(column_group.input_column_names, transformed_ddf))
ops.append(op)
except Exception:
LOG.exception("Failed to fit operator %s", column_group.op)
raise
if self.client:
results = [r.result() for r in self.client.compute(stats)]
else:
results = dask.compute(stats, scheduler="synchronous")[0]
for computed_stats, op in zip(results, ops):
op.fit_finalize(computed_stats)
# Remove all the operators we processed in this phase, and remove
# from the dependencies of other ops too
for stat_op in current_phase:
stat_ops.pop(stat_op)
for dependencies in stat_ops.values():
dependencies.difference_update(current_phase)
# hack: store input/output dtypes here. We should have complete dtype
# information for each operator (like we do for column names), but as
# an interim solution this gets us what we need.
input_dtypes = dataset.to_ddf()[self._input_columns()].dtypes
self.input_dtypes = dict(zip(input_dtypes.index, input_dtypes))
output_dtypes = self.transform(dataset).to_ddf().head(1).dtypes
self.output_dtypes = dict(zip(output_dtypes.index, output_dtypes))
|
def fit(self, dataset: Dataset):
"""Calculates statistics for this workflow on the input dataset
Parameters
-----------
dataset: Dataset
The input dataset to calculate statistics for. If there is a train/test split this
data should be the training dataset only.
"""
self._clear_worker_cache()
ddf = dataset.to_ddf(columns=self._input_columns())
# Get a dictionary mapping all StatOperators we need to fit to a set of any dependant
# StatOperators (having StatOperators that depend on the output of other StatOperators
# means that will have multiple phases in the fit cycle here)
stat_ops = {op: _get_stat_ops(op.parents) for op in _get_stat_ops([self.column_group])}
while stat_ops:
# get all the StatOperators that we can currently call fit on (no outstanding
# dependencies)
current_phase = [op for op, dependencies in stat_ops.items() if not dependencies]
if not current_phase:
# this shouldn't happen, but lets not infinite loop just in case
raise RuntimeError("failed to find dependency-free StatOperator to fit")
stats, ops = [], []
for column_group in current_phase:
# apply transforms necessary for the inputs to the current column group, ignoring
# the transforms from the statop itself
transformed_ddf = _transform_ddf(ddf, column_group.parents)
op = column_group.op
try:
stats.append(op.fit(column_group.input_column_names, transformed_ddf))
ops.append(op)
except Exception:
LOG.exception("Failed to fit operator %s", column_group.op)
raise
if self.client:
results = [r.result() for r in self.client.compute(stats)]
else:
results = dask.compute(stats, scheduler="synchronous")[0]
for computed_stats, op in zip(results, ops):
op.fit_finalize(computed_stats)
# Remove all the operators we processed in this phase, and remove
# from the dependencies of other ops too
for stat_op in current_phase:
stat_ops.pop(stat_op)
for dependencies in stat_ops.values():
dependencies.difference_update(current_phase)
# hack: store input/output dtypes here. We should have complete dtype
# information for each operator (like we do for column names), but as
# an interim solution this gets us what we need.
input_dtypes = dataset.to_ddf().dtypes
self.input_dtypes = dict(zip(input_dtypes.index, input_dtypes))
output_dtypes = self.transform(dataset).to_ddf().head(1).dtypes
self.output_dtypes = dict(zip(output_dtypes.index, output_dtypes))
|
[{'piece_type': 'error message', 'piece_content': 'E0224 15:58:10.330248 178 model_repository_manager.cc:963] failed to load \\'amazonreview_tf\\' version 1: Internal: unable to create stream: the provided PTX was compiled with an unsupported toolchain.\\n/nvtabular/nvtabular/workflow.py:236: UserWarning: Loading workflow generated with cudf version 0+untagged.1.gbd321d1 - but we are running cudf 0.18.0a+253.g53ed28e91c. This might cause issues\\nwarnings.warn(\\nE0224 15:58:20.534884 178 model_repository_manager.cc:963] failed to load \\'amazonreview_nvt\\' version 1: Internal: Traceback (most recent call last):\\nFile "/opt/tritonserver/backends/python/startup.py", line 197, in Init\\nself.backend.initialize(args)\\nFile "/models/models/amazonreview_nvt/1/model.py", line 57, in initialize\\nself.output_dtypes[name] = triton_string_to_numpy(conf["data_type"])\\nTypeError: \\'NoneType\\' object is not subscriptable\\nI0224 15:58:20.535093 178 server.cc:490]'}]
|
E0224 15:58:10.330248 178 model_repository_manager.cc:963] failed to load 'amazonreview_tf' version 1: Internal: unable to create stream: the provided PTX was compiled with an unsupported toolchain.
/nvtabular/nvtabular/workflow.py:236: UserWarning: Loading workflow generated with cudf version 0+untagged.1.gbd321d1 - but we are running cudf 0.18.0a+253.g53ed28e91c. This might cause issues
warnings.warn(
E0224 15:58:20.534884 178 model_repository_manager.cc:963] failed to load 'amazonreview_nvt' version 1: Internal: Traceback (most recent call last):
File "/opt/tritonserver/backends/python/startup.py", line 197, in Init
self.backend.initialize(args)
File "/models/models/amazonreview_nvt/1/model.py", line 57, in initialize
self.output_dtypes[name] = triton_string_to_numpy(conf["data_type"])
TypeError: 'NoneType' object is not subscriptable
I0224 15:58:20.535093 178 server.cc:490]
|
TypeError
|
def main(args):
"""Multi-GPU Criteo/DLRM Preprocessing Benchmark
This benchmark is designed to measure the time required to preprocess
the Criteo (1TB) dataset for Facebook’s DLRM model. The user must specify
the path of the raw dataset (using the `--data-path` flag), as well as the
output directory for all temporary/final data (using the `--out-path` flag)
Example Usage
-------------
python dask-nvtabular-criteo-benchmark.py
--data-path /path/to/criteo_parquet --out-path /out/dir/`
Dataset Requirements (Parquet)
------------------------------
This benchmark is designed with a parquet-formatted dataset in mind.
While a CSV-formatted dataset can be processed by NVTabular, converting
to parquet will yield significantly better performance. To convert your
dataset, try using the `optimize_criteo.ipynb` notebook (also located
in `NVTabular/examples/`)
For a detailed parameter overview see `NVTabular/examples/MultiGPUBench.md`
"""
# Input
data_path = args.data_path[:-1] if args.data_path[-1] == "/" else args.data_path
freq_limit = args.freq_limit
out_files_per_proc = args.out_files_per_proc
high_card_columns = args.high_cards.split(",")
dashboard_port = args.dashboard_port
if args.protocol == "ucx":
UCX_TLS = os.environ.get("UCX_TLS", "tcp,cuda_copy,cuda_ipc,sockcm")
os.environ["UCX_TLS"] = UCX_TLS
# Cleanup output directory
base_dir = args.out_path[:-1] if args.out_path[-1] == "/" else args.out_path
dask_workdir = os.path.join(base_dir, "workdir")
output_path = os.path.join(base_dir, "output")
stats_path = os.path.join(base_dir, "stats")
setup_dirs(base_dir, dask_workdir, output_path, stats_path)
# Use Criteo dataset by default (for now)
cont_names = (
args.cont_names.split(",") if args.cont_names else ["I" + str(x) for x in range(1, 14)]
)
cat_names = (
args.cat_names.split(",") if args.cat_names else ["C" + str(x) for x in range(1, 27)]
)
label_name = ["label"]
# Specify Categorify/GroupbyStatistics options
tree_width = {}
cat_cache = {}
for col in cat_names:
if col in high_card_columns:
tree_width[col] = args.tree_width
cat_cache[col] = args.cat_cache_high
else:
tree_width[col] = 1
cat_cache[col] = args.cat_cache_low
# Use total device size to calculate args.device_limit_frac
device_size = device_mem_size(kind="total")
device_limit = int(args.device_limit_frac * device_size)
device_pool_size = int(args.device_pool_frac * device_size)
part_size = int(args.part_mem_frac * device_size)
# Parse shuffle option
shuffle = None
if args.shuffle == "PER_WORKER":
shuffle = nvt_io.Shuffle.PER_WORKER
elif args.shuffle == "PER_PARTITION":
shuffle = nvt_io.Shuffle.PER_PARTITION
# Check if any device memory is already occupied
for dev in args.devices.split(","):
fmem = _pynvml_mem_size(kind="free", index=int(dev))
used = (device_size - fmem) / 1e9
if used > 1.0:
warnings.warn(f"BEWARE - {used} GB is already occupied on device {int(dev)}!")
# Setup LocalCUDACluster
if args.protocol == "tcp":
cluster = LocalCUDACluster(
protocol=args.protocol,
n_workers=args.n_workers,
CUDA_VISIBLE_DEVICES=args.devices,
device_memory_limit=device_limit,
local_directory=dask_workdir,
dashboard_address=":" + dashboard_port,
)
else:
cluster = LocalCUDACluster(
protocol=args.protocol,
n_workers=args.n_workers,
CUDA_VISIBLE_DEVICES=args.devices,
enable_nvlink=True,
device_memory_limit=device_limit,
local_directory=dask_workdir,
dashboard_address=":" + dashboard_port,
)
client = Client(cluster)
# Setup RMM pool
if args.device_pool_frac > 0.01:
setup_rmm_pool(client, device_pool_size)
# Define Dask NVTabular "Workflow"
if args.normalize:
cont_features = cont_names >> ops.FillMissing() >> ops.Normalize()
else:
cont_features = cont_names >> ops.FillMissing() >> ops.Clip(min_value=0) >> ops.LogOp()
cat_features = cat_names >> ops.Categorify(
out_path=stats_path,
tree_width=tree_width,
cat_cache=cat_cache,
freq_threshold=freq_limit,
search_sorted=not freq_limit,
on_host=not args.cats_on_device,
)
processor = Workflow(cat_features + cont_features + label_name, client=client)
dataset = Dataset(data_path, "parquet", part_size=part_size)
# Execute the dask graph
runtime = time.time()
processor.fit(dataset)
if args.profile is not None:
with performance_report(filename=args.profile):
processor.transform(dataset).to_parquet(
output_path=output_path,
num_threads=args.num_io_threads,
shuffle=shuffle,
out_files_per_proc=out_files_per_proc,
)
else:
processor.transform(dataset).to_parquet(
output_path=output_path,
num_threads=args.num_io_threads,
shuffle=shuffle,
out_files_per_proc=out_files_per_proc,
)
runtime = time.time() - runtime
print("\\nDask-NVTabular DLRM/Criteo benchmark")
print("--------------------------------------")
print(f"partition size | {part_size}")
print(f"protocol | {args.protocol}")
print(f"device(s) | {args.devices}")
print(f"rmm-pool-frac | {(args.device_pool_frac)}")
print(f"out-files-per-proc | {args.out_files_per_proc}")
print(f"num_io_threads | {args.num_io_threads}")
print(f"shuffle | {args.shuffle}")
print(f"cats-on-device | {args.cats_on_device}")
print("======================================")
print(f"Runtime[s] | {runtime}")
print("======================================\\n")
client.close()
|
def main(args):
"""Multi-GPU Criteo/DLRM Preprocessing Benchmark
This benchmark is designed to measure the time required to preprocess
the Criteo (1TB) dataset for Facebook’s DLRM model. The user must specify
the path of the raw dataset (using the `--data-path` flag), as well as the
output directory for all temporary/final data (using the `--out-path` flag)
Example Usage
-------------
python dask-nvtabular-criteo-benchmark.py
--data-path /path/to/criteo_parquet --out-path /out/dir/`
Dataset Requirements (Parquet)
------------------------------
This benchmark is designed with a parquet-formatted dataset in mind.
While a CSV-formatted dataset can be processed by NVTabular, converting
to parquet will yield significantly better performance. To convert your
dataset, try using the `optimize_criteo.ipynb` notebook (also located
in `NVTabular/examples/`)
For a detailed parameter overview see `NVTabular/examples/MultiGPUBench.md`
"""
# Input
data_path = args.data_path
freq_limit = args.freq_limit
out_files_per_proc = args.out_files_per_proc
high_card_columns = args.high_cards.split(",")
dashboard_port = args.dashboard_port
if args.protocol == "ucx":
UCX_TLS = os.environ.get("UCX_TLS", "tcp,cuda_copy,cuda_ipc,sockcm")
os.environ["UCX_TLS"] = UCX_TLS
# Cleanup output directory
BASE_DIR = args.out_path
dask_workdir = os.path.join(BASE_DIR, "workdir")
output_path = os.path.join(BASE_DIR, "output")
stats_path = os.path.join(BASE_DIR, "stats")
if not os.path.isdir(BASE_DIR):
os.mkdir(BASE_DIR)
for dir_path in (dask_workdir, output_path, stats_path):
if os.path.isdir(dir_path):
shutil.rmtree(dir_path)
os.mkdir(dir_path)
# Use Criteo dataset by default (for now)
cont_names = (
args.cont_names.split(",") if args.cont_names else ["I" + str(x) for x in range(1, 14)]
)
cat_names = (
args.cat_names.split(",") if args.cat_names else ["C" + str(x) for x in range(1, 27)]
)
label_name = ["label"]
# Specify Categorify/GroupbyStatistics options
tree_width = {}
cat_cache = {}
for col in cat_names:
if col in high_card_columns:
tree_width[col] = args.tree_width
cat_cache[col] = args.cat_cache_high
else:
tree_width[col] = 1
cat_cache[col] = args.cat_cache_low
# Use total device size to calculate args.device_limit_frac
device_size = device_mem_size(kind="total")
device_limit = int(args.device_limit_frac * device_size)
device_pool_size = int(args.device_pool_frac * device_size)
part_size = int(args.part_mem_frac * device_size)
# Parse shuffle option
shuffle = None
if args.shuffle == "PER_WORKER":
shuffle = nvt_io.Shuffle.PER_WORKER
elif args.shuffle == "PER_PARTITION":
shuffle = nvt_io.Shuffle.PER_PARTITION
# Check if any device memory is already occupied
for dev in args.devices.split(","):
fmem = _pynvml_mem_size(kind="free", index=int(dev))
used = (device_size - fmem) / 1e9
if used > 1.0:
warnings.warn(f"BEWARE - {used} GB is already occupied on device {int(dev)}!")
# Setup LocalCUDACluster
if args.protocol == "tcp":
cluster = LocalCUDACluster(
protocol=args.protocol,
n_workers=args.n_workers,
CUDA_VISIBLE_DEVICES=args.devices,
device_memory_limit=device_limit,
local_directory=dask_workdir,
dashboard_address=":" + dashboard_port,
)
else:
cluster = LocalCUDACluster(
protocol=args.protocol,
n_workers=args.n_workers,
CUDA_VISIBLE_DEVICES=args.devices,
enable_nvlink=True,
device_memory_limit=device_limit,
local_directory=dask_workdir,
dashboard_address=":" + dashboard_port,
)
client = Client(cluster)
# Setup RMM pool
if args.device_pool_frac > 0.01:
setup_rmm_pool(client, device_pool_size)
# Define Dask NVTabular "Workflow"
if args.normalize:
cont_features = cont_names >> ops.FillMissing() >> ops.Normalize()
else:
cont_features = cont_names >> ops.FillMissing() >> ops.Clip(min_value=0) >> ops.LogOp()
cat_features = cat_names >> ops.Categorify(
out_path=stats_path,
tree_width=tree_width,
cat_cache=cat_cache,
freq_threshold=freq_limit,
search_sorted=not freq_limit,
on_host=not args.cats_on_device,
)
processor = Workflow(cat_features + cont_features + label_name, client=client)
dataset = Dataset(data_path, "parquet", part_size=part_size)
# Execute the dask graph
runtime = time.time()
processor.fit(dataset)
if args.profile is not None:
with performance_report(filename=args.profile):
processor.transform(dataset).to_parquet(
output_path=output_path,
num_threads=args.num_io_threads,
shuffle=shuffle,
out_files_per_proc=out_files_per_proc,
)
else:
processor.transform(dataset).to_parquet(
output_path=output_path,
num_threads=args.num_io_threads,
shuffle=shuffle,
out_files_per_proc=out_files_per_proc,
)
runtime = time.time() - runtime
print("\\nDask-NVTabular DLRM/Criteo benchmark")
print("--------------------------------------")
print(f"partition size | {part_size}")
print(f"protocol | {args.protocol}")
print(f"device(s) | {args.devices}")
print(f"rmm-pool-frac | {(args.device_pool_frac)}")
print(f"out-files-per-proc | {args.out_files_per_proc}")
print(f"num_io_threads | {args.num_io_threads}")
print(f"shuffle | {args.shuffle}")
print(f"cats-on-device | {args.cats_on_device}")
print("======================================")
print(f"Runtime[s] | {runtime}")
print("======================================\\n")
client.close()
|
[{'piece_type': 'error message', 'piece_content': '(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f\\nrac 0.7 --device-pool-frac 0.8\\ndistributed.worker - WARNING - Compute Failed\\nFunction: subgraph_callable\\nargs: ( label I1 I2 I3 I4 I5 I6 I7 I8 I9 ... C17 C18 C19 C20 C21 C22 C23 C24 C25 C26\\n0 0 2.772589 5.808143 1.609438 2.397895 3.332205 0.000000 1.098612 4.442651 0.000000 ... -771205462 -1206449222 -864387787 359448199 -1761877609 357969245 -740331133 44548210 -842849922 -507617550\\n1 0 0.000000 5.147494 1.945910 4.584968 1.098612 0.000000 0.000000 4.394449 2.302585 ... <NA> -1206449222 -1793932789 <NA> <NA> <NA> <NA> -1441487878 809724924 -1775758394\\n2 0 2.639057 0.693147 0.693147 2.890372 0.000000 1.791759 0.000000 2.302585 2.833213 ... 1966974451 -1578429167 -1264946531 -2019528747 870435994 -322370806 -1701803791 2093085390 809724924 -317696227\\n3 0 4.110874 7.392032 0.693147 5.937536 3.610918 0.000\\nkwargs: {}\\nException: FileNotFoundError(2, \\'No such file or directory\\')\\n\\nTraceback (most recent call last):\\nFile "examples/dask-nvtabular-criteo-benchmark.py", line 373, in <module>\\nmain(parse_args())\\nFile "examples/dask-nvtabular-criteo-benchmark.py", line 195, in main\\noutput_path=output_path,\\nFile "/nvtabular/nvtabular/workflow.py", line 876, in apply\\ndtypes=dtypes,\\nFile "/nvtabular/nvtabular/workflow.py", line 991, in build_and_process_graph\\nnum_threads=num_io_threads,\\nFile "/nvtabular/nvtabular/workflow.py", line 1080, in ddf_to_dataset\\nnum_threads,\\nFile "/nvtabular/nvtabular/io/dask.py", line 110, in _ddf_to_dataset\\nout = client.compute(out).result()\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/distributed/client.py", line 225, in result\\nraise exc.with_traceback(tb)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/optimization.py", line 961, in __call__\\nreturn core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args)))\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 151, in get\\nresult = _execute_task(task, cache)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 121, in _execute_task\\nreturn func(*(_execute_task(a, cache) for a in args))\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/utils.py", line 29, in apply\\nreturn func(*args, **kwargs)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/core.py", line 5298, in apply_and_enforce\\ndf = func(*args, **kwargs)\\nFile "/nvtabular/nvtabular/workflow.py", line 723, in _aggregated_op\\ngdf = logic(gdf, columns_ctx, cols_grp, target_cols, stats_context)\\nFile "/opt/conda/envs/rapids/lib/python3.7/contextlib.py", line 74, in inner\\nreturn func(*args, **kwds)\\nFile "/nvtabular/nvtabular/ops/categorify.py", line 365, in apply_op\\ncat_names=cat_names,\\nFile "/nvtabular/nvtabular/ops/categorify.py", line 871, in _encode\\ncache, path, columns=selection_r, cache=cat_cache, cats_only=True\\nFile "/nvtabular/nvtabular/worker.py", line 84, in fetch_table_data\\nwith open(path, "rb") as f:\\nFileNotFoundError: [Errno 2] No such file or directory: \\'gs://merlin-datasets/output/stats/categories/unique.C1.parquet\\''}]
|
(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f
rac 0.7 --device-pool-frac 0.8
distributed.worker - WARNING - Compute Failed
Function: subgraph_callable
args: ( label I1 I2 I3 I4 I5 I6 I7 I8 I9 ... C17 C18 C19 C20 C21 C22 C23 C24 C25 C26
0 0 2.772589 5.808143 1.609438 2.397895 3.332205 0.000000 1.098612 4.442651 0.000000 ... -771205462 -1206449222 -864387787 359448199 -1761877609 357969245 -740331133 44548210 -842849922 -507617550
1 0 0.000000 5.147494 1.945910 4.584968 1.098612 0.000000 0.000000 4.394449 2.302585 ... <NA> -1206449222 -1793932789 <NA> <NA> <NA> <NA> -1441487878 809724924 -1775758394
2 0 2.639057 0.693147 0.693147 2.890372 0.000000 1.791759 0.000000 2.302585 2.833213 ... 1966974451 -1578429167 -1264946531 -2019528747 870435994 -322370806 -1701803791 2093085390 809724924 -317696227
3 0 4.110874 7.392032 0.693147 5.937536 3.610918 0.000
kwargs: {}
Exception: FileNotFoundError(2, 'No such file or directory')
Traceback (most recent call last):
File "examples/dask-nvtabular-criteo-benchmark.py", line 373, in <module>
main(parse_args())
File "examples/dask-nvtabular-criteo-benchmark.py", line 195, in main
output_path=output_path,
File "/nvtabular/nvtabular/workflow.py", line 876, in apply
dtypes=dtypes,
File "/nvtabular/nvtabular/workflow.py", line 991, in build_and_process_graph
num_threads=num_io_threads,
File "/nvtabular/nvtabular/workflow.py", line 1080, in ddf_to_dataset
num_threads,
File "/nvtabular/nvtabular/io/dask.py", line 110, in _ddf_to_dataset
out = client.compute(out).result()
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/distributed/client.py", line 225, in result
raise exc.with_traceback(tb)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/optimization.py", line 961, in __call__
return core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args)))
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 151, in get
result = _execute_task(task, cache)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 121, in _execute_task
return func(*(_execute_task(a, cache) for a in args))
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/utils.py", line 29, in apply
return func(*args, **kwargs)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/core.py", line 5298, in apply_and_enforce
df = func(*args, **kwargs)
File "/nvtabular/nvtabular/workflow.py", line 723, in _aggregated_op
gdf = logic(gdf, columns_ctx, cols_grp, target_cols, stats_context)
File "/opt/conda/envs/rapids/lib/python3.7/contextlib.py", line 74, in inner
return func(*args, **kwds)
File "/nvtabular/nvtabular/ops/categorify.py", line 365, in apply_op
cat_names=cat_names,
File "/nvtabular/nvtabular/ops/categorify.py", line 871, in _encode
cache, path, columns=selection_r, cache=cat_cache, cats_only=True
File "/nvtabular/nvtabular/worker.py", line 84, in fetch_table_data
with open(path, "rb") as f:
FileNotFoundError: [Errno 2] No such file or directory: 'gs://merlin-datasets/output/stats/categories/unique.C1.parquet'
|
FileNotFoundError
|
def __init__(self, out_dir, **kwargs):
super().__init__(out_dir, **kwargs)
self.data_paths = []
self.data_files = []
self.data_writers = []
self.data_bios = []
self._lock = threading.RLock()
self.pwriter = self._pwriter
self.pwriter_kwargs = {}
|
def __init__(self, out_dir, **kwargs):
super().__init__(out_dir, **kwargs)
self.data_paths = []
self.data_writers = []
self.data_bios = []
self._lock = threading.RLock()
self.pwriter = self._pwriter
self.pwriter_kwargs = {}
|
[{'piece_type': 'error message', 'piece_content': '(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f\\nrac 0.7 --device-pool-frac 0.8\\ndistributed.worker - WARNING - Compute Failed\\nFunction: subgraph_callable\\nargs: ( label I1 I2 I3 I4 I5 I6 I7 I8 I9 ... C17 C18 C19 C20 C21 C22 C23 C24 C25 C26\\n0 0 2.772589 5.808143 1.609438 2.397895 3.332205 0.000000 1.098612 4.442651 0.000000 ... -771205462 -1206449222 -864387787 359448199 -1761877609 357969245 -740331133 44548210 -842849922 -507617550\\n1 0 0.000000 5.147494 1.945910 4.584968 1.098612 0.000000 0.000000 4.394449 2.302585 ... <NA> -1206449222 -1793932789 <NA> <NA> <NA> <NA> -1441487878 809724924 -1775758394\\n2 0 2.639057 0.693147 0.693147 2.890372 0.000000 1.791759 0.000000 2.302585 2.833213 ... 1966974451 -1578429167 -1264946531 -2019528747 870435994 -322370806 -1701803791 2093085390 809724924 -317696227\\n3 0 4.110874 7.392032 0.693147 5.937536 3.610918 0.000\\nkwargs: {}\\nException: FileNotFoundError(2, \\'No such file or directory\\')\\n\\nTraceback (most recent call last):\\nFile "examples/dask-nvtabular-criteo-benchmark.py", line 373, in <module>\\nmain(parse_args())\\nFile "examples/dask-nvtabular-criteo-benchmark.py", line 195, in main\\noutput_path=output_path,\\nFile "/nvtabular/nvtabular/workflow.py", line 876, in apply\\ndtypes=dtypes,\\nFile "/nvtabular/nvtabular/workflow.py", line 991, in build_and_process_graph\\nnum_threads=num_io_threads,\\nFile "/nvtabular/nvtabular/workflow.py", line 1080, in ddf_to_dataset\\nnum_threads,\\nFile "/nvtabular/nvtabular/io/dask.py", line 110, in _ddf_to_dataset\\nout = client.compute(out).result()\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/distributed/client.py", line 225, in result\\nraise exc.with_traceback(tb)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/optimization.py", line 961, in __call__\\nreturn core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args)))\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 151, in get\\nresult = _execute_task(task, cache)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 121, in _execute_task\\nreturn func(*(_execute_task(a, cache) for a in args))\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/utils.py", line 29, in apply\\nreturn func(*args, **kwargs)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/core.py", line 5298, in apply_and_enforce\\ndf = func(*args, **kwargs)\\nFile "/nvtabular/nvtabular/workflow.py", line 723, in _aggregated_op\\ngdf = logic(gdf, columns_ctx, cols_grp, target_cols, stats_context)\\nFile "/opt/conda/envs/rapids/lib/python3.7/contextlib.py", line 74, in inner\\nreturn func(*args, **kwds)\\nFile "/nvtabular/nvtabular/ops/categorify.py", line 365, in apply_op\\ncat_names=cat_names,\\nFile "/nvtabular/nvtabular/ops/categorify.py", line 871, in _encode\\ncache, path, columns=selection_r, cache=cat_cache, cats_only=True\\nFile "/nvtabular/nvtabular/worker.py", line 84, in fetch_table_data\\nwith open(path, "rb") as f:\\nFileNotFoundError: [Errno 2] No such file or directory: \\'gs://merlin-datasets/output/stats/categories/unique.C1.parquet\\''}]
|
(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f
rac 0.7 --device-pool-frac 0.8
distributed.worker - WARNING - Compute Failed
Function: subgraph_callable
args: ( label I1 I2 I3 I4 I5 I6 I7 I8 I9 ... C17 C18 C19 C20 C21 C22 C23 C24 C25 C26
0 0 2.772589 5.808143 1.609438 2.397895 3.332205 0.000000 1.098612 4.442651 0.000000 ... -771205462 -1206449222 -864387787 359448199 -1761877609 357969245 -740331133 44548210 -842849922 -507617550
1 0 0.000000 5.147494 1.945910 4.584968 1.098612 0.000000 0.000000 4.394449 2.302585 ... <NA> -1206449222 -1793932789 <NA> <NA> <NA> <NA> -1441487878 809724924 -1775758394
2 0 2.639057 0.693147 0.693147 2.890372 0.000000 1.791759 0.000000 2.302585 2.833213 ... 1966974451 -1578429167 -1264946531 -2019528747 870435994 -322370806 -1701803791 2093085390 809724924 -317696227
3 0 4.110874 7.392032 0.693147 5.937536 3.610918 0.000
kwargs: {}
Exception: FileNotFoundError(2, 'No such file or directory')
Traceback (most recent call last):
File "examples/dask-nvtabular-criteo-benchmark.py", line 373, in <module>
main(parse_args())
File "examples/dask-nvtabular-criteo-benchmark.py", line 195, in main
output_path=output_path,
File "/nvtabular/nvtabular/workflow.py", line 876, in apply
dtypes=dtypes,
File "/nvtabular/nvtabular/workflow.py", line 991, in build_and_process_graph
num_threads=num_io_threads,
File "/nvtabular/nvtabular/workflow.py", line 1080, in ddf_to_dataset
num_threads,
File "/nvtabular/nvtabular/io/dask.py", line 110, in _ddf_to_dataset
out = client.compute(out).result()
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/distributed/client.py", line 225, in result
raise exc.with_traceback(tb)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/optimization.py", line 961, in __call__
return core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args)))
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 151, in get
result = _execute_task(task, cache)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 121, in _execute_task
return func(*(_execute_task(a, cache) for a in args))
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/utils.py", line 29, in apply
return func(*args, **kwargs)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/core.py", line 5298, in apply_and_enforce
df = func(*args, **kwargs)
File "/nvtabular/nvtabular/workflow.py", line 723, in _aggregated_op
gdf = logic(gdf, columns_ctx, cols_grp, target_cols, stats_context)
File "/opt/conda/envs/rapids/lib/python3.7/contextlib.py", line 74, in inner
return func(*args, **kwds)
File "/nvtabular/nvtabular/ops/categorify.py", line 365, in apply_op
cat_names=cat_names,
File "/nvtabular/nvtabular/ops/categorify.py", line 871, in _encode
cache, path, columns=selection_r, cache=cat_cache, cats_only=True
File "/nvtabular/nvtabular/worker.py", line 84, in fetch_table_data
with open(path, "rb") as f:
FileNotFoundError: [Errno 2] No such file or directory: 'gs://merlin-datasets/output/stats/categories/unique.C1.parquet'
|
FileNotFoundError
|
def _append_writer(self, path, schema=None, add_args=None, add_kwargs=None):
# Add additional args and kwargs
_args = add_args or []
_kwargs = tlz.merge(self.pwriter_kwargs, add_kwargs or {})
if self.bytes_io:
bio = BytesIO()
self.data_bios.append(bio)
self.data_writers.append(self.pwriter(bio, *_args, **_kwargs))
else:
f = fsspec.open(path, mode="wb").open()
self.data_files.append(f)
self.data_writers.append(self.pwriter(f, *_args, **_kwargs))
|
def _append_writer(self, path, schema=None, add_args=None, add_kwargs=None):
# Add additional args and kwargs
_args = add_args or []
_kwargs = tlz.merge(self.pwriter_kwargs, add_kwargs or {})
if self.bytes_io:
bio = BytesIO()
self.data_bios.append(bio)
self.data_writers.append(self.pwriter(bio, *_args, **_kwargs))
else:
self.data_writers.append(self.pwriter(path, *_args, **_kwargs))
|
[{'piece_type': 'error message', 'piece_content': '(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f\\nrac 0.7 --device-pool-frac 0.8\\ndistributed.worker - WARNING - Compute Failed\\nFunction: subgraph_callable\\nargs: ( label I1 I2 I3 I4 I5 I6 I7 I8 I9 ... C17 C18 C19 C20 C21 C22 C23 C24 C25 C26\\n0 0 2.772589 5.808143 1.609438 2.397895 3.332205 0.000000 1.098612 4.442651 0.000000 ... -771205462 -1206449222 -864387787 359448199 -1761877609 357969245 -740331133 44548210 -842849922 -507617550\\n1 0 0.000000 5.147494 1.945910 4.584968 1.098612 0.000000 0.000000 4.394449 2.302585 ... <NA> -1206449222 -1793932789 <NA> <NA> <NA> <NA> -1441487878 809724924 -1775758394\\n2 0 2.639057 0.693147 0.693147 2.890372 0.000000 1.791759 0.000000 2.302585 2.833213 ... 1966974451 -1578429167 -1264946531 -2019528747 870435994 -322370806 -1701803791 2093085390 809724924 -317696227\\n3 0 4.110874 7.392032 0.693147 5.937536 3.610918 0.000\\nkwargs: {}\\nException: FileNotFoundError(2, \\'No such file or directory\\')\\n\\nTraceback (most recent call last):\\nFile "examples/dask-nvtabular-criteo-benchmark.py", line 373, in <module>\\nmain(parse_args())\\nFile "examples/dask-nvtabular-criteo-benchmark.py", line 195, in main\\noutput_path=output_path,\\nFile "/nvtabular/nvtabular/workflow.py", line 876, in apply\\ndtypes=dtypes,\\nFile "/nvtabular/nvtabular/workflow.py", line 991, in build_and_process_graph\\nnum_threads=num_io_threads,\\nFile "/nvtabular/nvtabular/workflow.py", line 1080, in ddf_to_dataset\\nnum_threads,\\nFile "/nvtabular/nvtabular/io/dask.py", line 110, in _ddf_to_dataset\\nout = client.compute(out).result()\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/distributed/client.py", line 225, in result\\nraise exc.with_traceback(tb)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/optimization.py", line 961, in __call__\\nreturn core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args)))\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 151, in get\\nresult = _execute_task(task, cache)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 121, in _execute_task\\nreturn func(*(_execute_task(a, cache) for a in args))\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/utils.py", line 29, in apply\\nreturn func(*args, **kwargs)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/core.py", line 5298, in apply_and_enforce\\ndf = func(*args, **kwargs)\\nFile "/nvtabular/nvtabular/workflow.py", line 723, in _aggregated_op\\ngdf = logic(gdf, columns_ctx, cols_grp, target_cols, stats_context)\\nFile "/opt/conda/envs/rapids/lib/python3.7/contextlib.py", line 74, in inner\\nreturn func(*args, **kwds)\\nFile "/nvtabular/nvtabular/ops/categorify.py", line 365, in apply_op\\ncat_names=cat_names,\\nFile "/nvtabular/nvtabular/ops/categorify.py", line 871, in _encode\\ncache, path, columns=selection_r, cache=cat_cache, cats_only=True\\nFile "/nvtabular/nvtabular/worker.py", line 84, in fetch_table_data\\nwith open(path, "rb") as f:\\nFileNotFoundError: [Errno 2] No such file or directory: \\'gs://merlin-datasets/output/stats/categories/unique.C1.parquet\\''}]
|
(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f
rac 0.7 --device-pool-frac 0.8
distributed.worker - WARNING - Compute Failed
Function: subgraph_callable
args: ( label I1 I2 I3 I4 I5 I6 I7 I8 I9 ... C17 C18 C19 C20 C21 C22 C23 C24 C25 C26
0 0 2.772589 5.808143 1.609438 2.397895 3.332205 0.000000 1.098612 4.442651 0.000000 ... -771205462 -1206449222 -864387787 359448199 -1761877609 357969245 -740331133 44548210 -842849922 -507617550
1 0 0.000000 5.147494 1.945910 4.584968 1.098612 0.000000 0.000000 4.394449 2.302585 ... <NA> -1206449222 -1793932789 <NA> <NA> <NA> <NA> -1441487878 809724924 -1775758394
2 0 2.639057 0.693147 0.693147 2.890372 0.000000 1.791759 0.000000 2.302585 2.833213 ... 1966974451 -1578429167 -1264946531 -2019528747 870435994 -322370806 -1701803791 2093085390 809724924 -317696227
3 0 4.110874 7.392032 0.693147 5.937536 3.610918 0.000
kwargs: {}
Exception: FileNotFoundError(2, 'No such file or directory')
Traceback (most recent call last):
File "examples/dask-nvtabular-criteo-benchmark.py", line 373, in <module>
main(parse_args())
File "examples/dask-nvtabular-criteo-benchmark.py", line 195, in main
output_path=output_path,
File "/nvtabular/nvtabular/workflow.py", line 876, in apply
dtypes=dtypes,
File "/nvtabular/nvtabular/workflow.py", line 991, in build_and_process_graph
num_threads=num_io_threads,
File "/nvtabular/nvtabular/workflow.py", line 1080, in ddf_to_dataset
num_threads,
File "/nvtabular/nvtabular/io/dask.py", line 110, in _ddf_to_dataset
out = client.compute(out).result()
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/distributed/client.py", line 225, in result
raise exc.with_traceback(tb)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/optimization.py", line 961, in __call__
return core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args)))
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 151, in get
result = _execute_task(task, cache)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 121, in _execute_task
return func(*(_execute_task(a, cache) for a in args))
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/utils.py", line 29, in apply
return func(*args, **kwargs)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/core.py", line 5298, in apply_and_enforce
df = func(*args, **kwargs)
File "/nvtabular/nvtabular/workflow.py", line 723, in _aggregated_op
gdf = logic(gdf, columns_ctx, cols_grp, target_cols, stats_context)
File "/opt/conda/envs/rapids/lib/python3.7/contextlib.py", line 74, in inner
return func(*args, **kwds)
File "/nvtabular/nvtabular/ops/categorify.py", line 365, in apply_op
cat_names=cat_names,
File "/nvtabular/nvtabular/ops/categorify.py", line 871, in _encode
cache, path, columns=selection_r, cache=cat_cache, cats_only=True
File "/nvtabular/nvtabular/worker.py", line 84, in fetch_table_data
with open(path, "rb") as f:
FileNotFoundError: [Errno 2] No such file or directory: 'gs://merlin-datasets/output/stats/categories/unique.C1.parquet'
|
FileNotFoundError
|
def _close_writers(self):
md_dict = {}
for writer, path in zip(self.data_writers, self.data_paths):
fn = path.split(self.fs.sep)[-1]
md_dict[fn] = writer.close(metadata_file_path=fn)
for f in self.data_files:
f.close()
return md_dict
|
def _close_writers(self):
md_dict = {}
for writer, path in zip(self.data_writers, self.data_paths):
fn = path.split(self.fs.sep)[-1]
md_dict[fn] = writer.close(metadata_file_path=fn)
return md_dict
|
[{'piece_type': 'error message', 'piece_content': '(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f\\nrac 0.7 --device-pool-frac 0.8\\ndistributed.worker - WARNING - Compute Failed\\nFunction: subgraph_callable\\nargs: ( label I1 I2 I3 I4 I5 I6 I7 I8 I9 ... C17 C18 C19 C20 C21 C22 C23 C24 C25 C26\\n0 0 2.772589 5.808143 1.609438 2.397895 3.332205 0.000000 1.098612 4.442651 0.000000 ... -771205462 -1206449222 -864387787 359448199 -1761877609 357969245 -740331133 44548210 -842849922 -507617550\\n1 0 0.000000 5.147494 1.945910 4.584968 1.098612 0.000000 0.000000 4.394449 2.302585 ... <NA> -1206449222 -1793932789 <NA> <NA> <NA> <NA> -1441487878 809724924 -1775758394\\n2 0 2.639057 0.693147 0.693147 2.890372 0.000000 1.791759 0.000000 2.302585 2.833213 ... 1966974451 -1578429167 -1264946531 -2019528747 870435994 -322370806 -1701803791 2093085390 809724924 -317696227\\n3 0 4.110874 7.392032 0.693147 5.937536 3.610918 0.000\\nkwargs: {}\\nException: FileNotFoundError(2, \\'No such file or directory\\')\\n\\nTraceback (most recent call last):\\nFile "examples/dask-nvtabular-criteo-benchmark.py", line 373, in <module>\\nmain(parse_args())\\nFile "examples/dask-nvtabular-criteo-benchmark.py", line 195, in main\\noutput_path=output_path,\\nFile "/nvtabular/nvtabular/workflow.py", line 876, in apply\\ndtypes=dtypes,\\nFile "/nvtabular/nvtabular/workflow.py", line 991, in build_and_process_graph\\nnum_threads=num_io_threads,\\nFile "/nvtabular/nvtabular/workflow.py", line 1080, in ddf_to_dataset\\nnum_threads,\\nFile "/nvtabular/nvtabular/io/dask.py", line 110, in _ddf_to_dataset\\nout = client.compute(out).result()\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/distributed/client.py", line 225, in result\\nraise exc.with_traceback(tb)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/optimization.py", line 961, in __call__\\nreturn core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args)))\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 151, in get\\nresult = _execute_task(task, cache)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 121, in _execute_task\\nreturn func(*(_execute_task(a, cache) for a in args))\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/utils.py", line 29, in apply\\nreturn func(*args, **kwargs)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/core.py", line 5298, in apply_and_enforce\\ndf = func(*args, **kwargs)\\nFile "/nvtabular/nvtabular/workflow.py", line 723, in _aggregated_op\\ngdf = logic(gdf, columns_ctx, cols_grp, target_cols, stats_context)\\nFile "/opt/conda/envs/rapids/lib/python3.7/contextlib.py", line 74, in inner\\nreturn func(*args, **kwds)\\nFile "/nvtabular/nvtabular/ops/categorify.py", line 365, in apply_op\\ncat_names=cat_names,\\nFile "/nvtabular/nvtabular/ops/categorify.py", line 871, in _encode\\ncache, path, columns=selection_r, cache=cat_cache, cats_only=True\\nFile "/nvtabular/nvtabular/worker.py", line 84, in fetch_table_data\\nwith open(path, "rb") as f:\\nFileNotFoundError: [Errno 2] No such file or directory: \\'gs://merlin-datasets/output/stats/categories/unique.C1.parquet\\''}]
|
(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f
rac 0.7 --device-pool-frac 0.8
distributed.worker - WARNING - Compute Failed
Function: subgraph_callable
args: ( label I1 I2 I3 I4 I5 I6 I7 I8 I9 ... C17 C18 C19 C20 C21 C22 C23 C24 C25 C26
0 0 2.772589 5.808143 1.609438 2.397895 3.332205 0.000000 1.098612 4.442651 0.000000 ... -771205462 -1206449222 -864387787 359448199 -1761877609 357969245 -740331133 44548210 -842849922 -507617550
1 0 0.000000 5.147494 1.945910 4.584968 1.098612 0.000000 0.000000 4.394449 2.302585 ... <NA> -1206449222 -1793932789 <NA> <NA> <NA> <NA> -1441487878 809724924 -1775758394
2 0 2.639057 0.693147 0.693147 2.890372 0.000000 1.791759 0.000000 2.302585 2.833213 ... 1966974451 -1578429167 -1264946531 -2019528747 870435994 -322370806 -1701803791 2093085390 809724924 -317696227
3 0 4.110874 7.392032 0.693147 5.937536 3.610918 0.000
kwargs: {}
Exception: FileNotFoundError(2, 'No such file or directory')
Traceback (most recent call last):
File "examples/dask-nvtabular-criteo-benchmark.py", line 373, in <module>
main(parse_args())
File "examples/dask-nvtabular-criteo-benchmark.py", line 195, in main
output_path=output_path,
File "/nvtabular/nvtabular/workflow.py", line 876, in apply
dtypes=dtypes,
File "/nvtabular/nvtabular/workflow.py", line 991, in build_and_process_graph
num_threads=num_io_threads,
File "/nvtabular/nvtabular/workflow.py", line 1080, in ddf_to_dataset
num_threads,
File "/nvtabular/nvtabular/io/dask.py", line 110, in _ddf_to_dataset
out = client.compute(out).result()
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/distributed/client.py", line 225, in result
raise exc.with_traceback(tb)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/optimization.py", line 961, in __call__
return core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args)))
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 151, in get
result = _execute_task(task, cache)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 121, in _execute_task
return func(*(_execute_task(a, cache) for a in args))
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/utils.py", line 29, in apply
return func(*args, **kwargs)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/core.py", line 5298, in apply_and_enforce
df = func(*args, **kwargs)
File "/nvtabular/nvtabular/workflow.py", line 723, in _aggregated_op
gdf = logic(gdf, columns_ctx, cols_grp, target_cols, stats_context)
File "/opt/conda/envs/rapids/lib/python3.7/contextlib.py", line 74, in inner
return func(*args, **kwds)
File "/nvtabular/nvtabular/ops/categorify.py", line 365, in apply_op
cat_names=cat_names,
File "/nvtabular/nvtabular/ops/categorify.py", line 871, in _encode
cache, path, columns=selection_r, cache=cat_cache, cats_only=True
File "/nvtabular/nvtabular/worker.py", line 84, in fetch_table_data
with open(path, "rb") as f:
FileNotFoundError: [Errno 2] No such file or directory: 'gs://merlin-datasets/output/stats/categories/unique.C1.parquet'
|
FileNotFoundError
|
def fetch_table_data(
table_cache, path, cache="disk", cats_only=False, reader=None, columns=None, **kwargs
):
"""Utility to retrieve a cudf DataFrame from a cache (and add the
DataFrame to a cache if the element is missing). Note that `cats_only=True`
results in optimized logic for the `Categorify` transformation.
"""
table = table_cache.get(path, None)
if table and not isinstance(table, cudf.DataFrame):
if not cats_only:
return cudf.io.read_parquet(table, index=False)
df = cudf.io.read_parquet(table, index=False, columns=columns)
df.index.name = "labels"
df.reset_index(drop=False, inplace=True)
return df
reader = reader or cudf.io.read_parquet
if table is None:
if cache in ("device", "disk"):
table = reader(path, index=False, columns=columns, **kwargs)
elif cache == "host":
if reader == cudf.io.read_parquet:
# If the file is already in parquet format,
# we can just move the same bytes to host memory
with fsspec.open(path, "rb") as f:
table_cache[path] = BytesIO(f.read())
table = reader(table_cache[path], index=False, columns=columns, **kwargs)
else:
# Otherwise, we should convert the format to parquet
table = reader(path, index=False, columns=columns, **kwargs)
table_cache[path] = BytesIO()
table.to_parquet(table_cache[path])
if cats_only:
table.index.name = "labels"
table.reset_index(drop=False, inplace=True)
if cache == "device":
table_cache[path] = table.copy(deep=False)
return table
|
def fetch_table_data(
table_cache, path, cache="disk", cats_only=False, reader=None, columns=None, **kwargs
):
"""Utility to retrieve a cudf DataFrame from a cache (and add the
DataFrame to a cache if the element is missing). Note that `cats_only=True`
results in optimized logic for the `Categorify` transformation.
"""
table = table_cache.get(path, None)
if table and not isinstance(table, cudf.DataFrame):
if not cats_only:
return cudf.io.read_parquet(table, index=False)
df = cudf.io.read_parquet(table, index=False, columns=columns)
df.index.name = "labels"
df.reset_index(drop=False, inplace=True)
return df
reader = reader or cudf.io.read_parquet
if table is None:
if cache in ("device", "disk"):
table = reader(path, index=False, columns=columns, **kwargs)
elif cache == "host":
if reader == cudf.io.read_parquet:
# If the file is already in parquet format,
# we can just move the same bytes to host memory
with open(path, "rb") as f:
table_cache[path] = BytesIO(f.read())
table = reader(table_cache[path], index=False, columns=columns, **kwargs)
else:
# Otherwise, we should convert the format to parquet
table = reader(path, index=False, columns=columns, **kwargs)
table_cache[path] = BytesIO()
table.to_parquet(table_cache[path])
if cats_only:
table.index.name = "labels"
table.reset_index(drop=False, inplace=True)
if cache == "device":
table_cache[path] = table.copy(deep=False)
return table
|
[{'piece_type': 'error message', 'piece_content': '(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f\\nrac 0.7 --device-pool-frac 0.8\\ndistributed.worker - WARNING - Compute Failed\\nFunction: subgraph_callable\\nargs: ( label I1 I2 I3 I4 I5 I6 I7 I8 I9 ... C17 C18 C19 C20 C21 C22 C23 C24 C25 C26\\n0 0 2.772589 5.808143 1.609438 2.397895 3.332205 0.000000 1.098612 4.442651 0.000000 ... -771205462 -1206449222 -864387787 359448199 -1761877609 357969245 -740331133 44548210 -842849922 -507617550\\n1 0 0.000000 5.147494 1.945910 4.584968 1.098612 0.000000 0.000000 4.394449 2.302585 ... <NA> -1206449222 -1793932789 <NA> <NA> <NA> <NA> -1441487878 809724924 -1775758394\\n2 0 2.639057 0.693147 0.693147 2.890372 0.000000 1.791759 0.000000 2.302585 2.833213 ... 1966974451 -1578429167 -1264946531 -2019528747 870435994 -322370806 -1701803791 2093085390 809724924 -317696227\\n3 0 4.110874 7.392032 0.693147 5.937536 3.610918 0.000\\nkwargs: {}\\nException: FileNotFoundError(2, \\'No such file or directory\\')\\n\\nTraceback (most recent call last):\\nFile "examples/dask-nvtabular-criteo-benchmark.py", line 373, in <module>\\nmain(parse_args())\\nFile "examples/dask-nvtabular-criteo-benchmark.py", line 195, in main\\noutput_path=output_path,\\nFile "/nvtabular/nvtabular/workflow.py", line 876, in apply\\ndtypes=dtypes,\\nFile "/nvtabular/nvtabular/workflow.py", line 991, in build_and_process_graph\\nnum_threads=num_io_threads,\\nFile "/nvtabular/nvtabular/workflow.py", line 1080, in ddf_to_dataset\\nnum_threads,\\nFile "/nvtabular/nvtabular/io/dask.py", line 110, in _ddf_to_dataset\\nout = client.compute(out).result()\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/distributed/client.py", line 225, in result\\nraise exc.with_traceback(tb)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/optimization.py", line 961, in __call__\\nreturn core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args)))\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 151, in get\\nresult = _execute_task(task, cache)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 121, in _execute_task\\nreturn func(*(_execute_task(a, cache) for a in args))\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/utils.py", line 29, in apply\\nreturn func(*args, **kwargs)\\nFile "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/core.py", line 5298, in apply_and_enforce\\ndf = func(*args, **kwargs)\\nFile "/nvtabular/nvtabular/workflow.py", line 723, in _aggregated_op\\ngdf = logic(gdf, columns_ctx, cols_grp, target_cols, stats_context)\\nFile "/opt/conda/envs/rapids/lib/python3.7/contextlib.py", line 74, in inner\\nreturn func(*args, **kwds)\\nFile "/nvtabular/nvtabular/ops/categorify.py", line 365, in apply_op\\ncat_names=cat_names,\\nFile "/nvtabular/nvtabular/ops/categorify.py", line 871, in _encode\\ncache, path, columns=selection_r, cache=cat_cache, cats_only=True\\nFile "/nvtabular/nvtabular/worker.py", line 84, in fetch_table_data\\nwith open(path, "rb") as f:\\nFileNotFoundError: [Errno 2] No such file or directory: \\'gs://merlin-datasets/output/stats/categories/unique.C1.parquet\\''}]
|
(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f
rac 0.7 --device-pool-frac 0.8
distributed.worker - WARNING - Compute Failed
Function: subgraph_callable
args: ( label I1 I2 I3 I4 I5 I6 I7 I8 I9 ... C17 C18 C19 C20 C21 C22 C23 C24 C25 C26
0 0 2.772589 5.808143 1.609438 2.397895 3.332205 0.000000 1.098612 4.442651 0.000000 ... -771205462 -1206449222 -864387787 359448199 -1761877609 357969245 -740331133 44548210 -842849922 -507617550
1 0 0.000000 5.147494 1.945910 4.584968 1.098612 0.000000 0.000000 4.394449 2.302585 ... <NA> -1206449222 -1793932789 <NA> <NA> <NA> <NA> -1441487878 809724924 -1775758394
2 0 2.639057 0.693147 0.693147 2.890372 0.000000 1.791759 0.000000 2.302585 2.833213 ... 1966974451 -1578429167 -1264946531 -2019528747 870435994 -322370806 -1701803791 2093085390 809724924 -317696227
3 0 4.110874 7.392032 0.693147 5.937536 3.610918 0.000
kwargs: {}
Exception: FileNotFoundError(2, 'No such file or directory')
Traceback (most recent call last):
File "examples/dask-nvtabular-criteo-benchmark.py", line 373, in <module>
main(parse_args())
File "examples/dask-nvtabular-criteo-benchmark.py", line 195, in main
output_path=output_path,
File "/nvtabular/nvtabular/workflow.py", line 876, in apply
dtypes=dtypes,
File "/nvtabular/nvtabular/workflow.py", line 991, in build_and_process_graph
num_threads=num_io_threads,
File "/nvtabular/nvtabular/workflow.py", line 1080, in ddf_to_dataset
num_threads,
File "/nvtabular/nvtabular/io/dask.py", line 110, in _ddf_to_dataset
out = client.compute(out).result()
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/distributed/client.py", line 225, in result
raise exc.with_traceback(tb)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/optimization.py", line 961, in __call__
return core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args)))
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 151, in get
result = _execute_task(task, cache)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/core.py", line 121, in _execute_task
return func(*(_execute_task(a, cache) for a in args))
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/utils.py", line 29, in apply
return func(*args, **kwargs)
File "/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/core.py", line 5298, in apply_and_enforce
df = func(*args, **kwargs)
File "/nvtabular/nvtabular/workflow.py", line 723, in _aggregated_op
gdf = logic(gdf, columns_ctx, cols_grp, target_cols, stats_context)
File "/opt/conda/envs/rapids/lib/python3.7/contextlib.py", line 74, in inner
return func(*args, **kwds)
File "/nvtabular/nvtabular/ops/categorify.py", line 365, in apply_op
cat_names=cat_names,
File "/nvtabular/nvtabular/ops/categorify.py", line 871, in _encode
cache, path, columns=selection_r, cache=cat_cache, cats_only=True
File "/nvtabular/nvtabular/worker.py", line 84, in fetch_table_data
with open(path, "rb") as f:
FileNotFoundError: [Errno 2] No such file or directory: 'gs://merlin-datasets/output/stats/categories/unique.C1.parquet'
|
FileNotFoundError
|
def _chunkwise_moments(df):
df2 = cudf.DataFrame()
for col in df.columns:
df2[col] = df[col].astype("float64").pow(2)
vals = {
"df-count": df.count().to_frame().transpose(),
"df-sum": df.sum().astype("float64").to_frame().transpose(),
"df2-sum": df2.sum().to_frame().transpose(),
}
# NOTE: Perhaps we should convert to pandas here
# (since we know the results should be small)?
del df2
return vals
|
def _chunkwise_moments(df):
df2 = cudf.DataFrame()
for col in df.columns:
df2[col] = df[col].astype("float64").pow(2)
vals = {
"df-count": df.count().to_frame().transpose(),
"df-sum": df.sum().to_frame().transpose(),
"df2-sum": df2.sum().to_frame().transpose(),
}
# NOTE: Perhaps we should convert to pandas here
# (since we know the results should be small)?
del df2
return vals
|
[{'piece_type': 'error message', 'piece_content': '/opt/conda/envs/rapids/lib/python3.7/site-packages/pandas/core/series.py:726: RuntimeWarning: invalid value encountered in sqrt\\nresult = getattr(ufunc, method)(*inputs, **kwargs)\\n---------------------------------------------------------------------------\\nValueError Traceback (most recent call last)\\n<timed eval> in <module>\\n\\n/nvtabular0.3/NVTabular/nvtabular/workflow.py in apply(self, dataset, apply_offline, record_stats, shuffle, output_path, output_format, out_files_per_proc, num_io_threads, dtypes)\\n869 out_files_per_proc=out_files_per_proc,\\n870 num_io_threads=num_io_threads,\\n--> 871 dtypes=dtypes,\\n872 )\\n873 else:\\n\\n/nvtabular0.3/NVTabular/nvtabular/workflow.py in build_and_process_graph(self, dataset, end_phase, output_path, record_stats, shuffle, output_format, out_files_per_proc, apply_ops, num_io_threads, dtypes)\\n968 self._base_phase = 0 # Set _base_phase\\n969 for idx, _ in enumerate(self.phases[:end]):\\n--> 970 self.exec_phase(idx, record_stats=record_stats, update_ddf=(idx == (end - 1)))\\n971 self._base_phase = 0 # Re-Set _base_phase\\n972\\n\\n/nvtabular0.3/NVTabular/nvtabular/workflow.py in exec_phase(self, phase_index, record_stats, update_ddf)\\n755 _ddf = self.get_ddf()\\n756 if transforms:\\n--> 757 _ddf = self._aggregated_dask_transform(_ddf, transforms)\\n758\\n759 stats = []\\n\\n/nvtabular0.3/NVTabular/nvtabular/workflow.py in _aggregated_dask_transform(self, ddf, transforms)\\n724 for transform in transforms:\\n725 columns_ctx, cols_grp, target_cols, logic, stats_context = transform\\n--> 726 meta = logic(meta, columns_ctx, cols_grp, target_cols, stats_context)\\n727 return ddf.map_partitions(self.__class__._aggregated_op, transforms, meta=meta)\\n728\\n\\n/nvtabular0.3/NVTabular/nvtabular/ops/transform_operator.py in apply_op(self, gdf, columns_ctx, input_cols, target_cols, stats_context)\\n89 new_gdf = self.op_logic(gdf, target_columns, stats_context=stats_context)\\n90 self.update_columns_ctx(columns_ctx, input_cols, new_gdf.columns, target_columns)\\n---> 91 return self.assemble_new_df(gdf, new_gdf, target_columns)\\n92\\n93 def assemble_new_df(self, origin_gdf, new_gdf, target_columns):\\n\\n/nvtabular0.3/NVTabular/nvtabular/ops/transform_operator.py in assemble_new_df(self, origin_gdf, new_gdf, target_columns)\\n96 return new_gdf\\n97 else:\\n---> 98 origin_gdf[target_columns] = new_gdf\\n99 return origin_gdf\\n100 return cudf.concat([origin_gdf, new_gdf], axis=1)\\n\\n/opt/conda/envs/rapids/lib/python3.7/contextlib.py in inner(*args, **kwds)\\n72 def inner(*args, **kwds):\\n73 with self._recreate_cm():\\n---> 74 return func(*args, **kwds)\\n75 return inner\\n76\\n\\n/opt/conda/envs/rapids/lib/python3.7/site-packages/cudf/core/dataframe.py in __setitem__(self, arg, value)\\n777 replace_df=value,\\n778 input_cols=arg,\\n--> 779 mask=None,\\n780 )\\n781 else:\\n\\n/opt/conda/envs/rapids/lib/python3.7/site-packages/cudf/core/dataframe.py in _setitem_with_dataframe(input_df, replace_df, input_cols, mask)\\n7266 if len(input_cols) != len(replace_df.columns):\\n7267 raise ValueError(\\n-> 7268 "Number of Input Columns must be same replacement Dataframe"\\n7269 )\\n7270\\n\\nValueError: Number of Input Columns must be same replacement Dataframe'}, {'piece_type': 'other', 'piece_content': 'ds = nvt.Dataset(INPUT_PATH, engine="parquet", part_size="1000MB")\\n\\ncat_names = []\\ncont_names = [\\'NetworkTestLatency\\']\\nlabel_name = []\\n\\n# Initalize our Workflow\\nworkflow = nvt.Workflow(cat_names=cat_names, cont_names=cont_names, label_name=label_name)\\n\\n\\n# Add Normalize to the workflow for continuous columns\\nworkflow.add_cont_feature(nvt.ops.Normalize())\\n\\n# Finalize the Workflow\\nworkflow.finalize()\\n\\n%%time\\n\\nworkflow.apply(\\nds,\\noutput_format="parquet",\\noutput_path=OUTPUT_PATH,\\nshuffle=None,\\nout_files_per_proc=1,\\n)'}]
|
/opt/conda/envs/rapids/lib/python3.7/site-packages/pandas/core/series.py:726: RuntimeWarning: invalid value encountered in sqrt
result = getattr(ufunc, method)(*inputs, **kwargs)
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<timed eval> in <module>
/nvtabular0.3/NVTabular/nvtabular/workflow.py in apply(self, dataset, apply_offline, record_stats, shuffle, output_path, output_format, out_files_per_proc, num_io_threads, dtypes)
869 out_files_per_proc=out_files_per_proc,
870 num_io_threads=num_io_threads,
--> 871 dtypes=dtypes,
872 )
873 else:
/nvtabular0.3/NVTabular/nvtabular/workflow.py in build_and_process_graph(self, dataset, end_phase, output_path, record_stats, shuffle, output_format, out_files_per_proc, apply_ops, num_io_threads, dtypes)
968 self._base_phase = 0 # Set _base_phase
969 for idx, _ in enumerate(self.phases[:end]):
--> 970 self.exec_phase(idx, record_stats=record_stats, update_ddf=(idx == (end - 1)))
971 self._base_phase = 0 # Re-Set _base_phase
972
/nvtabular0.3/NVTabular/nvtabular/workflow.py in exec_phase(self, phase_index, record_stats, update_ddf)
755 _ddf = self.get_ddf()
756 if transforms:
--> 757 _ddf = self._aggregated_dask_transform(_ddf, transforms)
758
759 stats = []
/nvtabular0.3/NVTabular/nvtabular/workflow.py in _aggregated_dask_transform(self, ddf, transforms)
724 for transform in transforms:
725 columns_ctx, cols_grp, target_cols, logic, stats_context = transform
--> 726 meta = logic(meta, columns_ctx, cols_grp, target_cols, stats_context)
727 return ddf.map_partitions(self.__class__._aggregated_op, transforms, meta=meta)
728
/nvtabular0.3/NVTabular/nvtabular/ops/transform_operator.py in apply_op(self, gdf, columns_ctx, input_cols, target_cols, stats_context)
89 new_gdf = self.op_logic(gdf, target_columns, stats_context=stats_context)
90 self.update_columns_ctx(columns_ctx, input_cols, new_gdf.columns, target_columns)
---> 91 return self.assemble_new_df(gdf, new_gdf, target_columns)
92
93 def assemble_new_df(self, origin_gdf, new_gdf, target_columns):
/nvtabular0.3/NVTabular/nvtabular/ops/transform_operator.py in assemble_new_df(self, origin_gdf, new_gdf, target_columns)
96 return new_gdf
97 else:
---> 98 origin_gdf[target_columns] = new_gdf
99 return origin_gdf
100 return cudf.concat([origin_gdf, new_gdf], axis=1)
/opt/conda/envs/rapids/lib/python3.7/contextlib.py in inner(*args, **kwds)
72 def inner(*args, **kwds):
73 with self._recreate_cm():
---> 74 return func(*args, **kwds)
75 return inner
76
/opt/conda/envs/rapids/lib/python3.7/site-packages/cudf/core/dataframe.py in __setitem__(self, arg, value)
777 replace_df=value,
778 input_cols=arg,
--> 779 mask=None,
780 )
781 else:
/opt/conda/envs/rapids/lib/python3.7/site-packages/cudf/core/dataframe.py in _setitem_with_dataframe(input_df, replace_df, input_cols, mask)
7266 if len(input_cols) != len(replace_df.columns):
7267 raise ValueError(
-> 7268 "Number of Input Columns must be same replacement Dataframe"
7269 )
7270
ValueError: Number of Input Columns must be same replacement Dataframe
|
ValueError
|
def to_ddf(self, columns=None):
return dask_cudf.read_parquet(
self.paths,
columns=columns,
# can't omit reading the index in if we aren't being passed columns
index=None if columns is None else False,
gather_statistics=False,
split_row_groups=self.row_groups_per_part,
storage_options=self.storage_options,
)
|
def to_ddf(self, columns=None):
return dask_cudf.read_parquet(
self.paths,
columns=columns,
index=False,
gather_statistics=False,
split_row_groups=self.row_groups_per_part,
storage_options=self.storage_options,
)
|
[{'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nValueError Traceback (most recent call last)\\n<ipython-input-13-b133e2b51cbf> in <module>\\n2 valid_dataset = nvt.Dataset(OUTPUT_BUCKET_FOLDER+\\'valid_gdf.parquet\\', part_mem_fraction=0.12)\\n3\\n----> 4 workflow.apply(train_dataset, record_stats=True, output_path=output_train_dir, shuffle=True, out_files_per_proc=5)\\n5 workflow.apply(valid_dataset, record_stats=False, output_path=output_valid_dir, shuffle=False, out_files_per_proc=5)\\n\\n/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in apply(self, dataset, apply_offline, record_stats, shuffle, output_path, output_format, out_files_per_proc, num_io_threads, dtypes)\\n782 out_files_per_proc=out_files_per_proc,\\n783 num_io_threads=num_io_threads,\\n--> 784 dtypes=dtypes,\\n785 )\\n786 else:\\n\\n/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in build_and_process_graph(self, dataset, end_phase, output_path, record_stats, shuffle, output_format, out_files_per_proc, apply_ops, num_io_threads, dtypes)\\n885 self._base_phase = 0 # Set _base_phase\\n886 for idx, _ in enumerate(self.phases[:end]):\\n--> 887 self.exec_phase(idx, record_stats=record_stats, update_ddf=(idx == (end - 1)))\\n888 self._base_phase = 0 # Re-Set _base_phase\\n889\\n\\n/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in exec_phase(self, phase_index, record_stats, update_ddf)\\n631\\n632 # Perform transforms as single dask task (per ddf partition)\\n--> 633 _ddf = self.get_ddf()\\n634 if transforms:\\n635 _ddf = self._aggregated_dask_transform(_ddf, transforms)\\n\\n/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in get_ddf(self)\\n587 elif isinstance(self.ddf, Dataset):\\n588 columns = self.columns_ctx["all"]["base"]\\n--> 589 return self.ddf.to_ddf(columns=columns, shuffle=self._shuffle_parts)\\n590 return self.ddf\\n591\\n\\n/rapids/notebooks/benf/NVTabular/nvtabular/io/dataset.py in to_ddf(self, columns, shuffle, seed)\\n263 """\\n264 # Use DatasetEngine to create ddf\\n--> 265 ddf = self.engine.to_ddf(columns=columns)\\n266\\n267 # Shuffle the partitions of ddf (optional)\\n\\n/rapids/notebooks/benf/NVTabular/nvtabular/io/parquet.py in to_ddf(self, columns)\\n102 gather_statistics=False,\\n103 split_row_groups=self.row_groups_per_part,\\n--> 104 storage_options=self.storage_options,\\n105 )\\n106\\n\\n/opt/conda/envs/rapids/lib/python3.7/site-packages/dask_cudf/io/parquet.py in read_parquet(path, columns, split_row_groups, row_groups_per_part, **kwargs)\\n192 split_row_groups=split_row_groups,\\n193 engine=CudfEngine,\\n--> 194 **kwargs,\\n195 )\\n196\\n\\n/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/io/parquet/core.py in read_parquet(path, columns, filters, categories, index, storage_options, engine, gather_statistics, split_row_groups, chunksize, **kwargs)\\n248 # Modify `meta` dataframe accordingly\\n249 meta, index, columns = set_index_columns(\\n--> 250 meta, index, columns, index_in_columns, auto_index_allowed\\n251 )\\n252 if meta.index.name == NONE_LABEL:\\n\\n/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/io/parquet/core.py in set_index_columns(meta, index, columns, index_in_columns, auto_index_allowed)\\n771 "The following columns were not found in the dataset %s\\\\n"\\n772 "The following columns were found %s"\\n--> 773 % (set(columns) - set(meta.columns), meta.columns)\\n774 )\\n775\\n\\nValueError: The following columns were not found in the dataset {\\'document_id_promo_count\\', \\'publish_time_days_since_published\\', \\'campaign_id_clicked_sum_ctr\\', \\'ad_id_count\\', \\'ad_id_clicked_sum_ctr\\', \\'source_id_clicked_sum_ctr\\', \\'publish_time_promo_days_since_published\\', \\'advertiser_id_clicked_sum_ctr\\', \\'document_id_promo_clicked_sum_ctr\\', \\'publisher_id_clicked_sum_ctr\\', \\'geo_location_country\\', \\'geo_location_state\\'}\\nThe following columns were found Index([\\'display_id\\', \\'ad_id\\', \\'clicked\\', \\'uuid\\', \\'document_id\\', \\'timestamp\\',\\n\\'platform\\', \\'geo_location\\', \\'document_id_promo\\', \\'campaign_id\\',\\n\\'advertiser_id\\', \\'source_id\\', \\'publisher_id\\', \\'publish_time\\',\\n\\'source_id_promo\\', \\'publisher_id_promo\\', \\'publish_time_promo\\',\\n\\'day_event\\'],\\ndtype=\\'object\\')'}, {'piece_type': 'reproducing source code', 'piece_content': 'import cudf\\nimport nvtabular as nvt\\nfrom nvtabular.ops import LambdaOp, Categorify\\n\\n# Stripped down dataset with geo_locaiton codes like in outbrains\\ndf = cudf.DataFrame({"geo_location": ["US>CA", "CA>BC", "US>TN>659"]})\\n\\n# defining a simple workflow that strips out the country code from the first two digits of the\\n# geo_location code and sticks in a new \\'geo_location_country\\' field\\nCATEGORICAL_COLUMNS = ["geo_location", "geo_location_country"]\\nworkflow = nvt.Workflow(cat_names=CATEGORICAL_COLUMNS, cont_names=[], label_name=[])\\nworkflow.add_feature(\\n[\\nLambdaOp(\\nop_name="country",\\nf=lambda col, gdf: col.str.slice(0, 2),\\ncolumns=["geo_location"],\\nreplace=False,\\n),\\nCategorify(),\\n]\\n)\\nworkflow.finalize()\\n\\n# This fails because \\'geo_location_country\\' isn\\'t in the parquet file, but we\\'re listing\\n# as a column\\ndf.to_parquet("geo.parquet")\\nworkflow.apply(nvt.Dataset("geo.parquet"), output_path=None)'}]
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-13-b133e2b51cbf> in <module>
2 valid_dataset = nvt.Dataset(OUTPUT_BUCKET_FOLDER+'valid_gdf.parquet', part_mem_fraction=0.12)
3
----> 4 workflow.apply(train_dataset, record_stats=True, output_path=output_train_dir, shuffle=True, out_files_per_proc=5)
5 workflow.apply(valid_dataset, record_stats=False, output_path=output_valid_dir, shuffle=False, out_files_per_proc=5)
/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in apply(self, dataset, apply_offline, record_stats, shuffle, output_path, output_format, out_files_per_proc, num_io_threads, dtypes)
782 out_files_per_proc=out_files_per_proc,
783 num_io_threads=num_io_threads,
--> 784 dtypes=dtypes,
785 )
786 else:
/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in build_and_process_graph(self, dataset, end_phase, output_path, record_stats, shuffle, output_format, out_files_per_proc, apply_ops, num_io_threads, dtypes)
885 self._base_phase = 0 # Set _base_phase
886 for idx, _ in enumerate(self.phases[:end]):
--> 887 self.exec_phase(idx, record_stats=record_stats, update_ddf=(idx == (end - 1)))
888 self._base_phase = 0 # Re-Set _base_phase
889
/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in exec_phase(self, phase_index, record_stats, update_ddf)
631
632 # Perform transforms as single dask task (per ddf partition)
--> 633 _ddf = self.get_ddf()
634 if transforms:
635 _ddf = self._aggregated_dask_transform(_ddf, transforms)
/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in get_ddf(self)
587 elif isinstance(self.ddf, Dataset):
588 columns = self.columns_ctx["all"]["base"]
--> 589 return self.ddf.to_ddf(columns=columns, shuffle=self._shuffle_parts)
590 return self.ddf
591
/rapids/notebooks/benf/NVTabular/nvtabular/io/dataset.py in to_ddf(self, columns, shuffle, seed)
263 """
264 # Use DatasetEngine to create ddf
--> 265 ddf = self.engine.to_ddf(columns=columns)
266
267 # Shuffle the partitions of ddf (optional)
/rapids/notebooks/benf/NVTabular/nvtabular/io/parquet.py in to_ddf(self, columns)
102 gather_statistics=False,
103 split_row_groups=self.row_groups_per_part,
--> 104 storage_options=self.storage_options,
105 )
106
/opt/conda/envs/rapids/lib/python3.7/site-packages/dask_cudf/io/parquet.py in read_parquet(path, columns, split_row_groups, row_groups_per_part, **kwargs)
192 split_row_groups=split_row_groups,
193 engine=CudfEngine,
--> 194 **kwargs,
195 )
196
/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/io/parquet/core.py in read_parquet(path, columns, filters, categories, index, storage_options, engine, gather_statistics, split_row_groups, chunksize, **kwargs)
248 # Modify `meta` dataframe accordingly
249 meta, index, columns = set_index_columns(
--> 250 meta, index, columns, index_in_columns, auto_index_allowed
251 )
252 if meta.index.name == NONE_LABEL:
/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/io/parquet/core.py in set_index_columns(meta, index, columns, index_in_columns, auto_index_allowed)
771 "The following columns were not found in the dataset %s\\n"
772 "The following columns were found %s"
--> 773 % (set(columns) - set(meta.columns), meta.columns)
774 )
775
ValueError: The following columns were not found in the dataset {'document_id_promo_count', 'publish_time_days_since_published', 'campaign_id_clicked_sum_ctr', 'ad_id_count', 'ad_id_clicked_sum_ctr', 'source_id_clicked_sum_ctr', 'publish_time_promo_days_since_published', 'advertiser_id_clicked_sum_ctr', 'document_id_promo_clicked_sum_ctr', 'publisher_id_clicked_sum_ctr', 'geo_location_country', 'geo_location_state'}
The following columns were found Index(['display_id', 'ad_id', 'clicked', 'uuid', 'document_id', 'timestamp',
'platform', 'geo_location', 'document_id_promo', 'campaign_id',
'advertiser_id', 'source_id', 'publisher_id', 'publish_time',
'source_id_promo', 'publisher_id_promo', 'publish_time_promo',
'day_event'],
dtype='object')
|
ValueError
|
def get_ddf(self):
if self.ddf is None:
raise ValueError("No dask_cudf frame available.")
elif isinstance(self.ddf, Dataset):
# Right now we can't distinguish between input columns and generated columns
# in the dataset, we don't limit the columm set right now in the to_ddf call
# (https://github.com/NVIDIA/NVTabular/issues/409 )
return self.ddf.to_ddf(shuffle=self._shuffle_parts)
return self.ddf
|
def get_ddf(self):
if self.ddf is None:
raise ValueError("No dask_cudf frame available.")
elif isinstance(self.ddf, Dataset):
columns = self.columns_ctx["all"]["base"]
return self.ddf.to_ddf(columns=columns, shuffle=self._shuffle_parts)
return self.ddf
|
[{'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nValueError Traceback (most recent call last)\\n<ipython-input-13-b133e2b51cbf> in <module>\\n2 valid_dataset = nvt.Dataset(OUTPUT_BUCKET_FOLDER+\\'valid_gdf.parquet\\', part_mem_fraction=0.12)\\n3\\n----> 4 workflow.apply(train_dataset, record_stats=True, output_path=output_train_dir, shuffle=True, out_files_per_proc=5)\\n5 workflow.apply(valid_dataset, record_stats=False, output_path=output_valid_dir, shuffle=False, out_files_per_proc=5)\\n\\n/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in apply(self, dataset, apply_offline, record_stats, shuffle, output_path, output_format, out_files_per_proc, num_io_threads, dtypes)\\n782 out_files_per_proc=out_files_per_proc,\\n783 num_io_threads=num_io_threads,\\n--> 784 dtypes=dtypes,\\n785 )\\n786 else:\\n\\n/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in build_and_process_graph(self, dataset, end_phase, output_path, record_stats, shuffle, output_format, out_files_per_proc, apply_ops, num_io_threads, dtypes)\\n885 self._base_phase = 0 # Set _base_phase\\n886 for idx, _ in enumerate(self.phases[:end]):\\n--> 887 self.exec_phase(idx, record_stats=record_stats, update_ddf=(idx == (end - 1)))\\n888 self._base_phase = 0 # Re-Set _base_phase\\n889\\n\\n/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in exec_phase(self, phase_index, record_stats, update_ddf)\\n631\\n632 # Perform transforms as single dask task (per ddf partition)\\n--> 633 _ddf = self.get_ddf()\\n634 if transforms:\\n635 _ddf = self._aggregated_dask_transform(_ddf, transforms)\\n\\n/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in get_ddf(self)\\n587 elif isinstance(self.ddf, Dataset):\\n588 columns = self.columns_ctx["all"]["base"]\\n--> 589 return self.ddf.to_ddf(columns=columns, shuffle=self._shuffle_parts)\\n590 return self.ddf\\n591\\n\\n/rapids/notebooks/benf/NVTabular/nvtabular/io/dataset.py in to_ddf(self, columns, shuffle, seed)\\n263 """\\n264 # Use DatasetEngine to create ddf\\n--> 265 ddf = self.engine.to_ddf(columns=columns)\\n266\\n267 # Shuffle the partitions of ddf (optional)\\n\\n/rapids/notebooks/benf/NVTabular/nvtabular/io/parquet.py in to_ddf(self, columns)\\n102 gather_statistics=False,\\n103 split_row_groups=self.row_groups_per_part,\\n--> 104 storage_options=self.storage_options,\\n105 )\\n106\\n\\n/opt/conda/envs/rapids/lib/python3.7/site-packages/dask_cudf/io/parquet.py in read_parquet(path, columns, split_row_groups, row_groups_per_part, **kwargs)\\n192 split_row_groups=split_row_groups,\\n193 engine=CudfEngine,\\n--> 194 **kwargs,\\n195 )\\n196\\n\\n/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/io/parquet/core.py in read_parquet(path, columns, filters, categories, index, storage_options, engine, gather_statistics, split_row_groups, chunksize, **kwargs)\\n248 # Modify `meta` dataframe accordingly\\n249 meta, index, columns = set_index_columns(\\n--> 250 meta, index, columns, index_in_columns, auto_index_allowed\\n251 )\\n252 if meta.index.name == NONE_LABEL:\\n\\n/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/io/parquet/core.py in set_index_columns(meta, index, columns, index_in_columns, auto_index_allowed)\\n771 "The following columns were not found in the dataset %s\\\\n"\\n772 "The following columns were found %s"\\n--> 773 % (set(columns) - set(meta.columns), meta.columns)\\n774 )\\n775\\n\\nValueError: The following columns were not found in the dataset {\\'document_id_promo_count\\', \\'publish_time_days_since_published\\', \\'campaign_id_clicked_sum_ctr\\', \\'ad_id_count\\', \\'ad_id_clicked_sum_ctr\\', \\'source_id_clicked_sum_ctr\\', \\'publish_time_promo_days_since_published\\', \\'advertiser_id_clicked_sum_ctr\\', \\'document_id_promo_clicked_sum_ctr\\', \\'publisher_id_clicked_sum_ctr\\', \\'geo_location_country\\', \\'geo_location_state\\'}\\nThe following columns were found Index([\\'display_id\\', \\'ad_id\\', \\'clicked\\', \\'uuid\\', \\'document_id\\', \\'timestamp\\',\\n\\'platform\\', \\'geo_location\\', \\'document_id_promo\\', \\'campaign_id\\',\\n\\'advertiser_id\\', \\'source_id\\', \\'publisher_id\\', \\'publish_time\\',\\n\\'source_id_promo\\', \\'publisher_id_promo\\', \\'publish_time_promo\\',\\n\\'day_event\\'],\\ndtype=\\'object\\')'}, {'piece_type': 'reproducing source code', 'piece_content': 'import cudf\\nimport nvtabular as nvt\\nfrom nvtabular.ops import LambdaOp, Categorify\\n\\n# Stripped down dataset with geo_locaiton codes like in outbrains\\ndf = cudf.DataFrame({"geo_location": ["US>CA", "CA>BC", "US>TN>659"]})\\n\\n# defining a simple workflow that strips out the country code from the first two digits of the\\n# geo_location code and sticks in a new \\'geo_location_country\\' field\\nCATEGORICAL_COLUMNS = ["geo_location", "geo_location_country"]\\nworkflow = nvt.Workflow(cat_names=CATEGORICAL_COLUMNS, cont_names=[], label_name=[])\\nworkflow.add_feature(\\n[\\nLambdaOp(\\nop_name="country",\\nf=lambda col, gdf: col.str.slice(0, 2),\\ncolumns=["geo_location"],\\nreplace=False,\\n),\\nCategorify(),\\n]\\n)\\nworkflow.finalize()\\n\\n# This fails because \\'geo_location_country\\' isn\\'t in the parquet file, but we\\'re listing\\n# as a column\\ndf.to_parquet("geo.parquet")\\nworkflow.apply(nvt.Dataset("geo.parquet"), output_path=None)'}]
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-13-b133e2b51cbf> in <module>
2 valid_dataset = nvt.Dataset(OUTPUT_BUCKET_FOLDER+'valid_gdf.parquet', part_mem_fraction=0.12)
3
----> 4 workflow.apply(train_dataset, record_stats=True, output_path=output_train_dir, shuffle=True, out_files_per_proc=5)
5 workflow.apply(valid_dataset, record_stats=False, output_path=output_valid_dir, shuffle=False, out_files_per_proc=5)
/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in apply(self, dataset, apply_offline, record_stats, shuffle, output_path, output_format, out_files_per_proc, num_io_threads, dtypes)
782 out_files_per_proc=out_files_per_proc,
783 num_io_threads=num_io_threads,
--> 784 dtypes=dtypes,
785 )
786 else:
/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in build_and_process_graph(self, dataset, end_phase, output_path, record_stats, shuffle, output_format, out_files_per_proc, apply_ops, num_io_threads, dtypes)
885 self._base_phase = 0 # Set _base_phase
886 for idx, _ in enumerate(self.phases[:end]):
--> 887 self.exec_phase(idx, record_stats=record_stats, update_ddf=(idx == (end - 1)))
888 self._base_phase = 0 # Re-Set _base_phase
889
/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in exec_phase(self, phase_index, record_stats, update_ddf)
631
632 # Perform transforms as single dask task (per ddf partition)
--> 633 _ddf = self.get_ddf()
634 if transforms:
635 _ddf = self._aggregated_dask_transform(_ddf, transforms)
/rapids/notebooks/benf/NVTabular/nvtabular/workflow.py in get_ddf(self)
587 elif isinstance(self.ddf, Dataset):
588 columns = self.columns_ctx["all"]["base"]
--> 589 return self.ddf.to_ddf(columns=columns, shuffle=self._shuffle_parts)
590 return self.ddf
591
/rapids/notebooks/benf/NVTabular/nvtabular/io/dataset.py in to_ddf(self, columns, shuffle, seed)
263 """
264 # Use DatasetEngine to create ddf
--> 265 ddf = self.engine.to_ddf(columns=columns)
266
267 # Shuffle the partitions of ddf (optional)
/rapids/notebooks/benf/NVTabular/nvtabular/io/parquet.py in to_ddf(self, columns)
102 gather_statistics=False,
103 split_row_groups=self.row_groups_per_part,
--> 104 storage_options=self.storage_options,
105 )
106
/opt/conda/envs/rapids/lib/python3.7/site-packages/dask_cudf/io/parquet.py in read_parquet(path, columns, split_row_groups, row_groups_per_part, **kwargs)
192 split_row_groups=split_row_groups,
193 engine=CudfEngine,
--> 194 **kwargs,
195 )
196
/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/io/parquet/core.py in read_parquet(path, columns, filters, categories, index, storage_options, engine, gather_statistics, split_row_groups, chunksize, **kwargs)
248 # Modify `meta` dataframe accordingly
249 meta, index, columns = set_index_columns(
--> 250 meta, index, columns, index_in_columns, auto_index_allowed
251 )
252 if meta.index.name == NONE_LABEL:
/opt/conda/envs/rapids/lib/python3.7/site-packages/dask/dataframe/io/parquet/core.py in set_index_columns(meta, index, columns, index_in_columns, auto_index_allowed)
771 "The following columns were not found in the dataset %s\\n"
772 "The following columns were found %s"
--> 773 % (set(columns) - set(meta.columns), meta.columns)
774 )
775
ValueError: The following columns were not found in the dataset {'document_id_promo_count', 'publish_time_days_since_published', 'campaign_id_clicked_sum_ctr', 'ad_id_count', 'ad_id_clicked_sum_ctr', 'source_id_clicked_sum_ctr', 'publish_time_promo_days_since_published', 'advertiser_id_clicked_sum_ctr', 'document_id_promo_clicked_sum_ctr', 'publisher_id_clicked_sum_ctr', 'geo_location_country', 'geo_location_state'}
The following columns were found Index(['display_id', 'ad_id', 'clicked', 'uuid', 'document_id', 'timestamp',
'platform', 'geo_location', 'document_id_promo', 'campaign_id',
'advertiser_id', 'source_id', 'publisher_id', 'publish_time',
'source_id_promo', 'publisher_id_promo', 'publish_time_promo',
'day_event'],
dtype='object')
|
ValueError
|
def add_data(self, gdf):
# Populate columns idxs
if not self.col_idx:
for i, x in enumerate(gdf.columns.values):
self.col_idx[str(x)] = i
# list columns in cudf don't currently support chunked writing in parquet.
# hack around this by just writing a single file with this partition
# this restriction can be removed once cudf supports chunked writing
# in parquet
if any(is_list_dtype(gdf[col].dtype) for col in gdf.columns):
self._write_table(0, gdf, True)
return
# Generate `ind` array to map each row to an output file.
# This approach is certainly more optimized for shuffling
# than it is for non-shuffling, but using a single code
# path is probably worth the (possible) minor overhead.
nrows = gdf.shape[0]
typ = np.min_scalar_type(nrows * 2)
if self.shuffle:
ind = cp.random.choice(cp.arange(self.num_out_files, dtype=typ), nrows)
else:
ind = cp.arange(nrows, dtype=typ)
cp.floor_divide(ind, math.ceil(nrows / self.num_out_files), out=ind)
for x, group in enumerate(
gdf.scatter_by_map(ind, map_size=self.num_out_files, keep_index=False)
):
self.num_samples[x] += len(group)
if self.num_threads > 1:
self.queue.put((x, group))
else:
self._write_table(x, group)
# wait for all writes to finish before exiting
# (so that we aren't using memory)
if self.num_threads > 1:
self.queue.join()
|
def add_data(self, gdf):
# Populate columns idxs
if not self.col_idx:
for i, x in enumerate(gdf.columns.values):
self.col_idx[str(x)] = i
# list columns in cudf don't currently support chunked writing in parquet.
# hack around this by just writing a single file with this partition
# this restriction can be removed once cudf supports chunked writing
# in parquet
if any(is_list_dtype(gdf[col].dtype) for col in gdf.columns):
self._write_table(gdf, 0, True)
return
# Generate `ind` array to map each row to an output file.
# This approach is certainly more optimized for shuffling
# than it is for non-shuffling, but using a single code
# path is probably worth the (possible) minor overhead.
nrows = gdf.shape[0]
typ = np.min_scalar_type(nrows * 2)
if self.shuffle:
ind = cp.random.choice(cp.arange(self.num_out_files, dtype=typ), nrows)
else:
ind = cp.arange(nrows, dtype=typ)
cp.floor_divide(ind, math.ceil(nrows / self.num_out_files), out=ind)
for x, group in enumerate(
gdf.scatter_by_map(ind, map_size=self.num_out_files, keep_index=False)
):
self.num_samples[x] += len(group)
if self.num_threads > 1:
self.queue.put((x, group))
else:
self._write_table(x, group)
# wait for all writes to finish before exiting
# (so that we aren't using memory)
if self.num_threads > 1:
self.queue.join()
|
[{'piece_type': 'other', 'piece_content': "df = cudf.DataFrame({'doc_id': [1, 1, 2, 2, 3, 3, 4, 4], 'category_id': [1, 2, 3, 3, 5, 6, 6, 1], 'confidence_level': [0.92, 0.251, 0.352, 0.359, 0.978, 0.988, 0.978, 0.988]})\\n\\ndf_grouped = df.groupby('doc_id', as_index=False).agg({'category_id': ['collect'], 'confidence_level': ['collect']})\\n\\ndf_grouped.columns= df_grouped.columns.get_level_values(0)\\n\\ndf2 = cudf.DataFrame({'doc_id': [1, 2, 2, 3, 4, 3, 7, 8], 'category_id': [1, 2, 4, 3, 6, 6, 5, 2], 'ad_id': [1, 2, 3, 4, 4, 5, 10, 12],\\n'source_id': [1200, 1210, 1450, np.nan, 1330, 1200, 1500, 1350]})\\n\\ncolumns_ext = ['doc_id', 'category_id', 'confidence_level']\\nkind_ext='cudf'\\nproc = nvt.Workflow(\\ncat_names= ['doc_id', 'category_id', 'ad_id', 'source_id'],\\ncont_names=[],\\nlabel_name=[])\\n\\nproc.add_preprocess(JoinExternal(df_grouped, on= ['doc_id'], on_ext= ['doc_id'], kind_ext=kind_ext, columns_ext=columns_ext, cache='device', how='left'))\\ntrain_dataset = nvt.Dataset(df2)\\nproc.apply(train_dataset, apply_offline=True, record_stats=True, output_path='./output/', shuffle=True, out_files_per_proc=1)"}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-19-f93c44c3b381> in <module>\\n11 proc.add_preprocess(JoinExternal(df_grouped, on= [\\'doc_id\\'], on_ext= [\\'doc_id\\'], kind_ext=kind_ext, columns_ext=columns_ext, cache=\\'device\\', how=\\'left\\'))\\n12 train_dataset = nvt.Dataset(df2)\\n---> 13 proc.apply(train_dataset, apply_offline=True, record_stats=True, output_path=\\'./output/\\', shuffle=True, out_files_per_proc=1)\\n\\n~/ronaya/NVTabular/nvtabular/workflow.py in apply(self, dataset, apply_offline, record_stats, shuffle, output_path, output_format, out_files_per_proc, num_io_threads)\\n738 output_format=output_format,\\n739 out_files_per_proc=out_files_per_proc,\\n--> 740 num_io_threads=num_io_threads,\\n741 )\\n742 else:\\n\\n~/ronaya/NVTabular/nvtabular/workflow.py in build_and_process_graph(self, dataset, end_phase, output_path, record_stats, shuffle, output_format, out_files_per_proc, apply_ops, num_io_threads)\\n845 shuffle=shuffle,\\n846 out_files_per_proc=out_files_per_proc,\\n--> 847 num_threads=num_io_threads,\\n848 )\\n849\\n\\n~/ronaya/NVTabular/nvtabular/workflow.py in ddf_to_dataset(self, output_path, shuffle, out_files_per_proc, output_format, num_threads)\\n931 output_format,\\n932 self.client,\\n--> 933 num_threads,\\n934 )\\n935 return\\n\\n~/ronaya/NVTabular/nvtabular/io/dask.py in _ddf_to_dataset(ddf, fs, output_path, shuffle, out_files_per_proc, cat_names, cont_names, label_names, output_format, client, num_threads)\\n110 out = client.compute(out).result()\\n111 else:\\n--> 112 out = dask.compute(out, scheduler="synchronous")[0]\\n113\\n114 # Follow-up Shuffling and _metadata creation\\n\\n~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/base.py in compute(*args, **kwargs)\\n450 postcomputes.append(x.__dask_postcompute__())\\n451\\n--> 452 results = schedule(dsk, keys, **kwargs)\\n453 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])\\n454\\n\\n~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/local.py in get_sync(dsk, keys, **kwargs)\\n525 """\\n526 kwargs.pop("num_workers", None) # if num_workers present, remove it\\n--> 527 return get_async(apply_sync, 1, dsk, keys, **kwargs)\\n528\\n529\\n\\n~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs)\\n492\\n493 while state["ready"] and len(state["running"]) < num_workers:\\n--> 494 fire_task()\\n495\\n496 succeeded = True\\n\\n~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/local.py in fire_task()\\n464 pack_exception,\\n465 ),\\n--> 466 callback=queue.put,\\n467 )\\n468\\n\\n~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/local.py in apply_sync(func, args, kwds, callback)\\n514 def apply_sync(func, args=(), kwds={}, callback=None):\\n515 """ A naive synchronous version of apply_async """\\n--> 516 res = func(*args, **kwds)\\n517 if callback is not None:\\n518 callback(res)\\n\\n~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)\\n225 failed = False\\n226 except BaseException as e:\\n--> 227 result = pack_exception(e, dumps)\\n228 failed = True\\n229 return key, result, failed\\n\\n~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)\\n220 try:\\n221 task, data = loads(task_info)\\n--> 222 result = _execute_task(task, data)\\n223 id = get_id()\\n224 result = dumps((result, id))\\n\\n~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/core.py in _execute_task(arg, cache, dsk)\\n119 # temporaries by their reference count and can execute certain\\n120 # operations in-place.\\n--> 121 return func(*(_execute_task(a, cache) for a in args))\\n122 elif not ishashable(arg):\\n123 return arg\\n\\n~/miniconda3/envs/1019/lib/python3.7/contextlib.py in inner(*args, **kwds)\\n72 def inner(*args, **kwds):\\n73 with self._recreate_cm():\\n---> 74 return func(*args, **kwds)\\n75 return inner\\n76\\n\\n~/ronaya/NVTabular/nvtabular/io/dask.py in _write_output_partition(gdf, processed_path, shuffle, out_files_per_proc, fs, cat_names, cont_names, label_names, output_format, num_threads)\\n61\\n62 # Add data\\n---> 63 writer.add_data(gdf)\\n64\\n65 return gdf_size\\n\\n~/miniconda3/envs/1019/lib/python3.7/contextlib.py in inner(*args, **kwds)\\n72 def inner(*args, **kwds):\\n73 with self._recreate_cm():\\n---> 74 return func(*args, **kwds)\\n75 return inner\\n76\\n\\n~/ronaya/NVTabular/nvtabular/io/writer.py in add_data(self, gdf)\\n125 # in parquet\\n126 if any(is_list_dtype(gdf[col].dtype) for col in gdf.columns):\\n--> 127 self._write_table(gdf, 0, True)\\n128 return\\n129\\n\\n~/ronaya/NVTabular/nvtabular/io/parquet.py in _write_table(self, idx, data, has_list_column)\\n210 # write out a new file, rather than stream multiple chunks to a single file\\n211 filename = self._get_filename(len(self.data_paths))\\n--> 212 data.to_parquet(filename)\\n213 self.data_paths.append(filename)\\n214 else:\\n\\nAttributeError: \\'int\\' object has no attribute \\'to_parquet\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-19-f93c44c3b381> in <module>
11 proc.add_preprocess(JoinExternal(df_grouped, on= ['doc_id'], on_ext= ['doc_id'], kind_ext=kind_ext, columns_ext=columns_ext, cache='device', how='left'))
12 train_dataset = nvt.Dataset(df2)
---> 13 proc.apply(train_dataset, apply_offline=True, record_stats=True, output_path='./output/', shuffle=True, out_files_per_proc=1)
~/ronaya/NVTabular/nvtabular/workflow.py in apply(self, dataset, apply_offline, record_stats, shuffle, output_path, output_format, out_files_per_proc, num_io_threads)
738 output_format=output_format,
739 out_files_per_proc=out_files_per_proc,
--> 740 num_io_threads=num_io_threads,
741 )
742 else:
~/ronaya/NVTabular/nvtabular/workflow.py in build_and_process_graph(self, dataset, end_phase, output_path, record_stats, shuffle, output_format, out_files_per_proc, apply_ops, num_io_threads)
845 shuffle=shuffle,
846 out_files_per_proc=out_files_per_proc,
--> 847 num_threads=num_io_threads,
848 )
849
~/ronaya/NVTabular/nvtabular/workflow.py in ddf_to_dataset(self, output_path, shuffle, out_files_per_proc, output_format, num_threads)
931 output_format,
932 self.client,
--> 933 num_threads,
934 )
935 return
~/ronaya/NVTabular/nvtabular/io/dask.py in _ddf_to_dataset(ddf, fs, output_path, shuffle, out_files_per_proc, cat_names, cont_names, label_names, output_format, client, num_threads)
110 out = client.compute(out).result()
111 else:
--> 112 out = dask.compute(out, scheduler="synchronous")[0]
113
114 # Follow-up Shuffling and _metadata creation
~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/base.py in compute(*args, **kwargs)
450 postcomputes.append(x.__dask_postcompute__())
451
--> 452 results = schedule(dsk, keys, **kwargs)
453 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
454
~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/local.py in get_sync(dsk, keys, **kwargs)
525 """
526 kwargs.pop("num_workers", None) # if num_workers present, remove it
--> 527 return get_async(apply_sync, 1, dsk, keys, **kwargs)
528
529
~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs)
492
493 while state["ready"] and len(state["running"]) < num_workers:
--> 494 fire_task()
495
496 succeeded = True
~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/local.py in fire_task()
464 pack_exception,
465 ),
--> 466 callback=queue.put,
467 )
468
~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/local.py in apply_sync(func, args, kwds, callback)
514 def apply_sync(func, args=(), kwds={}, callback=None):
515 """ A naive synchronous version of apply_async """
--> 516 res = func(*args, **kwds)
517 if callback is not None:
518 callback(res)
~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
225 failed = False
226 except BaseException as e:
--> 227 result = pack_exception(e, dumps)
228 failed = True
229 return key, result, failed
~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
220 try:
221 task, data = loads(task_info)
--> 222 result = _execute_task(task, data)
223 id = get_id()
224 result = dumps((result, id))
~/miniconda3/envs/1019/lib/python3.7/site-packages/dask/core.py in _execute_task(arg, cache, dsk)
119 # temporaries by their reference count and can execute certain
120 # operations in-place.
--> 121 return func(*(_execute_task(a, cache) for a in args))
122 elif not ishashable(arg):
123 return arg
~/miniconda3/envs/1019/lib/python3.7/contextlib.py in inner(*args, **kwds)
72 def inner(*args, **kwds):
73 with self._recreate_cm():
---> 74 return func(*args, **kwds)
75 return inner
76
~/ronaya/NVTabular/nvtabular/io/dask.py in _write_output_partition(gdf, processed_path, shuffle, out_files_per_proc, fs, cat_names, cont_names, label_names, output_format, num_threads)
61
62 # Add data
---> 63 writer.add_data(gdf)
64
65 return gdf_size
~/miniconda3/envs/1019/lib/python3.7/contextlib.py in inner(*args, **kwds)
72 def inner(*args, **kwds):
73 with self._recreate_cm():
---> 74 return func(*args, **kwds)
75 return inner
76
~/ronaya/NVTabular/nvtabular/io/writer.py in add_data(self, gdf)
125 # in parquet
126 if any(is_list_dtype(gdf[col].dtype) for col in gdf.columns):
--> 127 self._write_table(gdf, 0, True)
128 return
129
~/ronaya/NVTabular/nvtabular/io/parquet.py in _write_table(self, idx, data, has_list_column)
210 # write out a new file, rather than stream multiple chunks to a single file
211 filename = self._get_filename(len(self.data_paths))
--> 212 data.to_parquet(filename)
213 self.data_paths.append(filename)
214 else:
AttributeError: 'int' object has no attribute 'to_parquet'
|
AttributeError
|
def __init__(
self,
paths,
part_size,
storage_options,
row_groups_per_part=None,
legacy=False,
batch_size=None,
):
# TODO: Improve dask_cudf.read_parquet performance so that
# this class can be slimmed down.
super().__init__(paths, part_size, storage_options)
self.batch_size = batch_size
self._metadata, self._base = self.metadata
self._pieces = None
if row_groups_per_part is None:
file_path = self._metadata.row_group(0).column(0).file_path
path0 = (
self.fs.sep.join([self._base, file_path])
if file_path != ""
else self._base # This is a single file
)
if row_groups_per_part is None:
rg_byte_size_0 = _memory_usage(cudf.io.read_parquet(path0, row_groups=0, row_group=0))
row_groups_per_part = self.part_size / rg_byte_size_0
if row_groups_per_part < 1.0:
warnings.warn(
f"Row group size {rg_byte_size_0} is bigger than requested part_size "
f"{self.part_size}"
)
row_groups_per_part = 1.0
self.row_groups_per_part = int(row_groups_per_part)
assert self.row_groups_per_part > 0
|
def __init__(
self,
paths,
part_size,
storage_options,
row_groups_per_part=None,
legacy=False,
batch_size=None,
):
# TODO: Improve dask_cudf.read_parquet performance so that
# this class can be slimmed down.
super().__init__(paths, part_size, storage_options)
self.batch_size = batch_size
self._metadata, self._base = self.metadata
self._pieces = None
if row_groups_per_part is None:
file_path = self._metadata.row_group(0).column(0).file_path
path0 = (
self.fs.sep.join([self._base, file_path])
if file_path != ""
else self._base # This is a single file
)
if row_groups_per_part is None:
rg_byte_size_0 = (
cudf.io.read_parquet(path0, row_groups=0, row_group=0)
.memory_usage(deep=True, index=True)
.sum()
)
row_groups_per_part = self.part_size / rg_byte_size_0
if row_groups_per_part < 1.0:
warnings.warn(
f"Row group size {rg_byte_size_0} is bigger than requested part_size "
f"{self.part_size}"
)
row_groups_per_part = 1.0
self.row_groups_per_part = int(row_groups_per_part)
assert self.row_groups_per_part > 0
|
[{'piece_type': 'other', 'piece_content': 'python dataloader_bench.py torch <PATH TO Folder with Parquet Files on local> parquet 0.2'}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "main.py", line 106, in <module>\\nmain(args)\\nFile "main.py", line 61, in main\\ntrain_paths, engine="parquet", part_mem_fraction=float(args.gpu_mem_frac)\\nFile "/root/miniconda/lib/python3.7/site-packages/nvtabular/io/dataset.py", line 224, in __init__\\npaths, part_size, storage_options=storage_options, **kwargs\\nFile "/root/miniconda/lib/python3.7/site-packages/nvtabular/io/parquet.py", line 69, in __init__\\n.memory_usage(deep=True, index=True)\\nFile "/root/miniconda/lib/python3.7/site-packages/cudf/core/dataframe.py", line 842, in memory_usage\\nsizes = [col._memory_usage(deep=deep) for col in self._data.columns]\\nFile "/root/miniconda/lib/python3.7/site-packages/cudf/core/dataframe.py", line 842, in <listcomp>\\nsizes = [col._memory_usage(deep=deep) for col in self._data.columns]\\nFile "/root/miniconda/lib/python3.7/site-packages/cudf/core/column/column.py", line 299, in _memory_usage\\nreturn self.__sizeof__()\\nFile "/root/miniconda/lib/python3.7/site-packages/cudf/core/column/column.py", line 183, in __sizeof__\\nn = self.data.size\\nFile "cudf/_lib/column.pyx", line 99, in cudf._lib.column.Column.data.__get__\\nAttributeError: \\'ListDtype\\' object has no attribute \\'itemsize\\''}, {'piece_type': 'other', 'piece_content': '# packages in environment at /root/miniconda:\\n#\\n# Name Version Build Channel\\n_libgcc_mutex 0.1 main\\n_pytorch_select 0.1 cpu_0\\nabseil-cpp 20200225.2 he1b5a44_2 conda-forge\\narrow-cpp 0.17.1 py37h1234567_11_cuda conda-forge\\narrow-cpp-proc 1.0.1 cuda conda-forge\\naws-sdk-cpp 1.7.164 hba45d7a_2 conda-forge\\nblas 1.0 mkl\\nbokeh 2.2.2 py37_0\\nboost-cpp 1.72.0 h9359b55_3 conda-forge\\nbrotli 1.0.9 he6710b0_2\\nbrotlipy 0.7.0 py37h7b6447c_1000\\nbzip2 1.0.8 h7b6447c_0\\nc-ares 1.16.1 h7b6447c_0\\nca-certificates 2020.10.14 0\\ncertifi 2020.6.20 py37_0\\ncffi 1.14.3 py37he30daa8_0\\nchardet 3.0.4 py37_1003\\nclick 7.1.2 py_0\\ncloudpickle 1.6.0 py_0\\nconda 4.9.0 py37he5f6b98_0 conda-forge\\nconda-package-handling 1.7.2 py37h03888b9_0\\ncryptography 3.1.1 py37h1ba5d50_0\\ncudatoolkit 10.2.89 h6bb024c_0 nvidia\\ncudf 0.15.0 cuda_10.2_py37_g71cb8c0e0_0 rapidsai\\ncudnn 7.6.5 cuda10.2_0\\ncupy 7.8.0 py37h940342b_1 conda-forge\\ncurl 7.71.1 hbc83047_1\\ncython 0.29.21 pypi_0 pypi\\ncytoolz 0.11.0 py37h7b6447c_0\\ndask 2.30.0 py_0\\ndask-core 2.30.0 py_0\\ndask-cudf 0.15.0 py37_g71cb8c0e0_0 rapidsai\\ndistributed 2.30.0 py37_0\\ndlpack 0.3 he6710b0_1\\ndouble-conversion 3.1.5 he6710b0_1\\nfastavro 1.0.0.post1 py37h7b6447c_0\\nfastrlock 0.5 py37he6710b0_0\\nfreetype 2.10.3 h5ab3b9f_0\\nfsspec 0.8.3 py_0\\ngflags 2.2.2 he6710b0_0\\nglog 0.4.0 he6710b0_0\\ngrpc-cpp 1.30.2 heedbac9_0 conda-forge\\nheapdict 1.0.1 py_0\\nicu 67.1 he1b5a44_0 conda-forge\\nidna 2.10 py_0\\nintel-openmp 2019.4 243\\njinja2 2.11.2 py_0\\njpeg 9b h024ee3a_2\\nkrb5 1.18.2 h173b8e3_0\\nlcms2 2.11 h396b838_0\\nld_impl_linux-64 2.33.1 h53a641e_7\\nlibcudf 0.15.0 cuda10.2_g71cb8c0e0_0 rapidsai\\nlibcurl 7.71.1 h20c2e04_1\\nlibedit 3.1.20191231 h14c3975_1\\nlibevent 2.1.10 hcdb4288_3 conda-forge\\nlibffi 3.3 he6710b0_2\\nlibgcc-ng 9.1.0 hdf63c60_0\\nlibllvm10 10.0.1 hbcb73fb_5\\nlibpng 1.6.37 hbc83047_0\\nlibprotobuf 3.12.4 hd408876_0\\nlibrmm 0.15.0 cuda10.2_g8005ca5_0 rapidsai\\nlibssh2 1.9.0 h1ba5d50_1\\nlibstdcxx-ng 9.1.0 hdf63c60_0\\nlibthrift 0.13.0 hbe8ec66_6 conda-forge\\nlibtiff 4.1.0 h2733197_1\\nllvmlite 0.34.0 py37h269e1b5_4\\nlocket 0.2.0 py37_1\\nlz4-c 1.9.2 heb0550a_3\\nmarkupsafe 1.1.1 py37h14c3975_1\\nmkl 2019.4 243\\nmkl-service 2.3.0 py37he904b0f_0\\nmkl_fft 1.2.0 py37h23d657b_0\\nmkl_random 1.1.0 py37hd6b4f25_0\\nmsgpack-python 1.0.0 py37hfd86e86_1\\nnccl 2.7.8.1 hc6a2c23_1 conda-forge\\nncurses 6.2 he6710b0_1\\nninja 1.10.1 py37hfd86e86_0\\nnumba 0.51.2 py37h04863e7_1\\nnumpy 1.19.1 py37hbc911f0_0\\nnumpy-base 1.19.1 py37hfa32c7d_0\\nnvtabular 0.2.0 cudaunknown_py37_0 nvidia/label/nvidia\\nolefile 0.46 py37_0\\nopenssl 1.1.1h h7b6447c_0\\npackaging 20.4 py_0\\npandas 1.1.3 py37he6710b0_0\\nparquet-cpp 1.5.1 2 conda-forge\\npartd 1.1.0 py_0\\npillow 8.0.0 py37h9a89aac_0\\npip 20.2.3 py37_0\\npsutil 5.7.2 py37h7b6447c_0\\npyarrow 0.17.1 py37h1234567_11_cuda conda-forge\\npycosat 0.6.3 py37h7b6447c_0\\npycparser 2.19 pypi_0 pypi\\npynvml 8.0.4 py_1 conda-forge\\npyopenssl 19.1.0 py_1\\npyparsing 2.4.7 py_0\\npysocks 1.7.1 py37_1\\npython 3.7.9 h7579374_0\\npython-dateutil 2.8.1 py_0\\npython_abi 3.7 1_cp37m conda-forge\\npytorch 1.5.0 cpu_py37hd91cbb3_0\\npytz 2020.1 py_0\\npyyaml 5.3.1 py37h7b6447c_1\\nre2 2020.07.06 he1b5a44_1 conda-forge\\nreadline 8.0 h7b6447c_0\\nrequests 2.24.0 py_0\\nrmm 0.15.0 cuda_10.2_py37_g8005ca5_0 rapidsai\\nruamel_yaml 0.15.87 py37h7b6447c_1\\nsetuptools 50.3.0 py37hb0f4dca_1\\nsix 1.15.0 py_0\\nsnappy 1.1.8 he6710b0_0\\nsortedcontainers 2.2.2 py_0\\nspdlog 1.8.0 hfd86e86_1\\nsqlite 3.33.0 h62c20be_0\\ntbb 2020.3 hfd86e86_0\\ntblib 1.7.0 py_0\\nthrift-compiler 0.13.0 hbe8ec66_6 conda-forge\\nthrift-cpp 0.13.0 6 conda-forge\\ntk 8.6.10 hbc83047_0\\ntoolz 0.11.1 py_0\\ntornado 6.0.4 py37h7b6447c_1\\ntqdm 4.50.2 py_0\\ntyping_extensions 3.7.4.3 py_0\\nurllib3 1.25.10 py_0\\nwheel 0.35.1 py_0\\nxz 5.2.5 h7b6447c_0\\nyaml 0.2.5 h7b6447c_0\\nzict 2.0.0 py_0\\nzlib 1.2.11 h7b6447c_3\\nzstd 1.4.5 h9ceee32_0'}]
|
Traceback (most recent call last):
File "main.py", line 106, in <module>
main(args)
File "main.py", line 61, in main
train_paths, engine="parquet", part_mem_fraction=float(args.gpu_mem_frac)
File "/root/miniconda/lib/python3.7/site-packages/nvtabular/io/dataset.py", line 224, in __init__
paths, part_size, storage_options=storage_options, **kwargs
File "/root/miniconda/lib/python3.7/site-packages/nvtabular/io/parquet.py", line 69, in __init__
.memory_usage(deep=True, index=True)
File "/root/miniconda/lib/python3.7/site-packages/cudf/core/dataframe.py", line 842, in memory_usage
sizes = [col._memory_usage(deep=deep) for col in self._data.columns]
File "/root/miniconda/lib/python3.7/site-packages/cudf/core/dataframe.py", line 842, in <listcomp>
sizes = [col._memory_usage(deep=deep) for col in self._data.columns]
File "/root/miniconda/lib/python3.7/site-packages/cudf/core/column/column.py", line 299, in _memory_usage
return self.__sizeof__()
File "/root/miniconda/lib/python3.7/site-packages/cudf/core/column/column.py", line 183, in __sizeof__
n = self.data.size
File "cudf/_lib/column.pyx", line 99, in cudf._lib.column.Column.data.__get__
AttributeError: 'ListDtype' object has no attribute 'itemsize'
|
AttributeError
|
def __init__(self, *args, **kwargs):
super().__init__(*args)
self._meta = {}
self.csv_kwargs = kwargs
self.names = self.csv_kwargs.get("names", None)
# CSV reader needs a list of files
# (Assume flat directory structure if this is a dir)
if len(self.paths) == 1 and self.fs.isdir(self.paths[0]):
self.paths = self.fs.glob(self.fs.sep.join([self.paths[0], "*"]))
|
def __init__(self, *args, **kwargs):
super().__init__(*args)
self._meta = {}
self.names = kwargs.pop("names", None)
self.csv_kwargs = kwargs
# CSV reader needs a list of files
# (Assume flat directory structure if this is a dir)
if len(self.paths) == 1 and self.fs.isdir(self.paths[0]):
self.paths = self.fs.glob(self.fs.sep.join([self.paths[0], "*"]))
|
[{'piece_type': 'error message', 'piece_content': "AttributeErrorTraceback (most recent call last)\\n<ipython-input-1-84910288ec3f> in <module>\\n44 del gdf\\n45 path_out = '/raid/criteo/tests/jp_csv_orig/'\\n---> 46 file_to_pq(train_set, 'csv', output_folder=path_out, cols=cols, dtypes=dtypes)\\n\\n<ipython-input-1-84910288ec3f> in file_to_pq(target_files, file_type, output_folder, cols, dtypes)\\n34 old_file_path = None\\n35 writer = None\\n---> 36 for gdf in tar:\\n37 # gdf.to_parquet(output_folder)\\n38 file_path = os.path.join(output_folder, os.path.split(tar.itr.file_path)[1].split('.')[0])\\n\\n/nvtabular/nvtabular/io.py in __iter__(self)\\n329 def __iter__(self):\\n330 for path in self.paths:\\n--> 331 yield from GPUFileIterator(path, **self.kwargs)\\n332\\n333\\n\\n/nvtabular/nvtabular/io.py in __iter__(self)\\n271 for chunk in self.engine:\\n272 if self.dtypes:\\n--> 273 self._set_dtypes(chunk)\\n274 yield chunk\\n275 chunk = None\\n\\nAttributeError: 'GPUFileIterator' object has no attribute '_set_dtypes'"}]
|
AttributeErrorTraceback (most recent call last)
<ipython-input-1-84910288ec3f> in <module>
44 del gdf
45 path_out = '/raid/criteo/tests/jp_csv_orig/'
---> 46 file_to_pq(train_set, 'csv', output_folder=path_out, cols=cols, dtypes=dtypes)
<ipython-input-1-84910288ec3f> in file_to_pq(target_files, file_type, output_folder, cols, dtypes)
34 old_file_path = None
35 writer = None
---> 36 for gdf in tar:
37 # gdf.to_parquet(output_folder)
38 file_path = os.path.join(output_folder, os.path.split(tar.itr.file_path)[1].split('.')[0])
/nvtabular/nvtabular/io.py in __iter__(self)
329 def __iter__(self):
330 for path in self.paths:
--> 331 yield from GPUFileIterator(path, **self.kwargs)
332
333
/nvtabular/nvtabular/io.py in __iter__(self)
271 for chunk in self.engine:
272 if self.dtypes:
--> 273 self._set_dtypes(chunk)
274 yield chunk
275 chunk = None
AttributeError: 'GPUFileIterator' object has no attribute '_set_dtypes'
|
AttributeError
|
def to_ddf(self, columns=None):
return dask_cudf.read_csv(self.paths, chunksize=self.part_size, **self.csv_kwargs)[columns]
|
def to_ddf(self, columns=None):
return dask_cudf.read_csv(
self.paths, names=self.names, chunksize=self.part_size, **self.csv_kwargs
)[columns]
|
[{'piece_type': 'error message', 'piece_content': "AttributeErrorTraceback (most recent call last)\\n<ipython-input-1-84910288ec3f> in <module>\\n44 del gdf\\n45 path_out = '/raid/criteo/tests/jp_csv_orig/'\\n---> 46 file_to_pq(train_set, 'csv', output_folder=path_out, cols=cols, dtypes=dtypes)\\n\\n<ipython-input-1-84910288ec3f> in file_to_pq(target_files, file_type, output_folder, cols, dtypes)\\n34 old_file_path = None\\n35 writer = None\\n---> 36 for gdf in tar:\\n37 # gdf.to_parquet(output_folder)\\n38 file_path = os.path.join(output_folder, os.path.split(tar.itr.file_path)[1].split('.')[0])\\n\\n/nvtabular/nvtabular/io.py in __iter__(self)\\n329 def __iter__(self):\\n330 for path in self.paths:\\n--> 331 yield from GPUFileIterator(path, **self.kwargs)\\n332\\n333\\n\\n/nvtabular/nvtabular/io.py in __iter__(self)\\n271 for chunk in self.engine:\\n272 if self.dtypes:\\n--> 273 self._set_dtypes(chunk)\\n274 yield chunk\\n275 chunk = None\\n\\nAttributeError: 'GPUFileIterator' object has no attribute '_set_dtypes'"}]
|
AttributeErrorTraceback (most recent call last)
<ipython-input-1-84910288ec3f> in <module>
44 del gdf
45 path_out = '/raid/criteo/tests/jp_csv_orig/'
---> 46 file_to_pq(train_set, 'csv', output_folder=path_out, cols=cols, dtypes=dtypes)
<ipython-input-1-84910288ec3f> in file_to_pq(target_files, file_type, output_folder, cols, dtypes)
34 old_file_path = None
35 writer = None
---> 36 for gdf in tar:
37 # gdf.to_parquet(output_folder)
38 file_path = os.path.join(output_folder, os.path.split(tar.itr.file_path)[1].split('.')[0])
/nvtabular/nvtabular/io.py in __iter__(self)
329 def __iter__(self):
330 for path in self.paths:
--> 331 yield from GPUFileIterator(path, **self.kwargs)
332
333
/nvtabular/nvtabular/io.py in __iter__(self)
271 for chunk in self.engine:
272 if self.dtypes:
--> 273 self._set_dtypes(chunk)
274 yield chunk
275 chunk = None
AttributeError: 'GPUFileIterator' object has no attribute '_set_dtypes'
|
AttributeError
|
def _predict(self, X):
"""Collect results from clf.predict calls."""
if self.refit:
return np.asarray([clf.predict(X) for clf in self.clfs_]).T
else:
return np.asarray([self.le_.transform(clf.predict(X))
for clf in self.clfs_]).T
|
def _predict(self, X):
"""Collect results from clf.predict calls."""
return np.asarray([clf.predict(X) for clf in self.clfs_]).T
|
[{'piece_type': 'reproducing source code', 'piece_content': "import numpy as np\\nfrom sklearn.ensemble import RandomForestClassifier\\nfrom mlxtend.classifier import EnsembleVoteClassifier\\n\\ndata = np.array([0, 1, 2, 3, 0, 1, 2, 3])[:, np.newaxis]\\nlabels = ['a', 'b', 'c', 'd', 'a', 'b', 'c', 'd']\\n\\ntest = np.array([0, 1])[:, np.newaxis]\\n\\nrf = RandomForestClassifier()\\nrf.fit(data, labels)\\nprint(rf.predict(test)) # output: ['a', 'b']\\n\\nclf = EnsembleVoteClassifier(clfs=[rf, rf], refit=False)\\nclf.fit(data, labels)\\nprint(clf.predict(test)) # <-- error"}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "/_mlxtend_bug/reproduce.py", line 16, in <module>\\nprint(clf.predict(test))\\nFile "/venv/py3/lib/python3.4/site-packages/mlxtend/classifier/ensemble_vote.py", line 197, in predict\\narr=predictions)\\nFile "/venv/py3/lib/python3.4/site-packages/numpy/lib/shape_base.py", line 132, in apply_along_axis\\nres = asanyarray(func1d(inarr_view[ind0], *args, **kwargs))\\nFile "/venv/py3/lib/python3.4/site-packages/mlxtend/classifier/ensemble_vote.py", line 195, in <lambda>\\nweights=self.weights)),\\nTypeError: Cannot cast array data from dtype(\\'<U1\\') to dtype(\\'int64\\') according to the rule \\'safe\\''}, {'piece_type': 'source code', 'piece_content': "else: # 'hard' voting\\npredictions = self._predict(X)\\n\\nmaj = np.apply_along_axis(lambda x:\\nnp.argmax(np.bincount(x,\\nweights=self.weights)),\\naxis=1,\\narr=predictions)"}, {'piece_type': 'source code', 'piece_content': "else: # 'hard' voting\\npredictions = self._predict(X)\\n\\nmaj = np.apply_along_axis(lambda x:\\nnp.argmax(np.bincount(self.le_.transform(x),\\nweights=self.weights)),\\naxis=1,\\narr=predictions)"}]
|
Traceback (most recent call last):
File "/_mlxtend_bug/reproduce.py", line 16, in <module>
print(clf.predict(test))
File "/venv/py3/lib/python3.4/site-packages/mlxtend/classifier/ensemble_vote.py", line 197, in predict
arr=predictions)
File "/venv/py3/lib/python3.4/site-packages/numpy/lib/shape_base.py", line 132, in apply_along_axis
res = asanyarray(func1d(inarr_view[ind0], *args, **kwargs))
File "/venv/py3/lib/python3.4/site-packages/mlxtend/classifier/ensemble_vote.py", line 195, in <lambda>
weights=self.weights)),
TypeError: Cannot cast array data from dtype('<U1') to dtype('int64') according to the rule 'safe'
|
TypeError
|
def transform(
self,
xx: Any,
yy: Any,
zz: Any = None,
tt: Any = None,
radians: bool = False,
errcheck: bool = False,
direction: Union[TransformDirection, str] = TransformDirection.FORWARD,
) -> Any:
"""
Transform points between two coordinate systems.
.. versionadded:: 2.1.1 errcheck
.. versionadded:: 2.2.0 direction
Parameters
----------
xx: scalar or array (numpy or python)
Input x coordinate(s).
yy: scalar or array (numpy or python)
Input y coordinate(s).
zz: scalar or array (numpy or python), optional
Input z coordinate(s).
tt: scalar or array (numpy or python), optional
Input time coordinate(s).
radians: boolean, optional
If True, will expect input data to be in radians and will return radians
if the projection is geographic. Default is False (degrees). Ignored for
pipeline transformations.
errcheck: boolean, optional (default False)
If True an exception is raised if the transformation is invalid.
By default errcheck=False and an invalid transformation
returns ``inf`` and no exception is raised.
direction: pyproj.enums.TransformDirection, optional
The direction of the transform.
Default is :attr:`pyproj.enums.TransformDirection.FORWARD`.
Example:
>>> from pyproj import Transformer
>>> transformer = Transformer.from_crs("epsg:4326", "epsg:3857")
>>> x3, y3 = transformer.transform(33, 98)
>>> "%.3f %.3f" % (x3, y3)
'10909310.098 3895303.963'
>>> pipeline_str = (
... "+proj=pipeline +step +proj=longlat +ellps=WGS84 "
... "+step +proj=unitconvert +xy_in=rad +xy_out=deg"
... )
>>> pipe_trans = Transformer.from_pipeline(pipeline_str)
>>> xt, yt = pipe_trans.transform(2.1, 0.001)
>>> "%.3f %.3f" % (xt, yt)
'2.100 0.001'
>>> transproj = Transformer.from_crs(
... {"proj":'geocent', "ellps":'WGS84', "datum":'WGS84'},
... "EPSG:4326",
... always_xy=True,
... )
>>> xpj, ypj, zpj = transproj.transform(
... -2704026.010,
... -4253051.810,
... 3895878.820,
... radians=True,
... )
>>> "%.3f %.3f %.3f" % (xpj, ypj, zpj)
'-2.137 0.661 -20.531'
>>> transprojr = Transformer.from_crs(
... "EPSG:4326",
... {"proj":'geocent', "ellps":'WGS84', "datum":'WGS84'},
... always_xy=True,
... )
>>> xpjr, ypjr, zpjr = transprojr.transform(xpj, ypj, zpj, radians=True)
>>> "%.3f %.3f %.3f" % (xpjr, ypjr, zpjr)
'-2704026.010 -4253051.810 3895878.820'
>>> transformer = Transformer.from_proj("epsg:4326", 4326, skip_equivalent=True)
>>> xeq, yeq = transformer.transform(33, 98)
>>> "%.0f %.0f" % (xeq, yeq)
'33 98'
"""
# process inputs, making copies that support buffer API.
inx, xisfloat, xislist, xistuple = _copytobuffer(xx)
iny, yisfloat, yislist, yistuple = _copytobuffer(yy)
if zz is not None:
inz, zisfloat, zislist, zistuple = _copytobuffer(zz)
else:
inz = None
if tt is not None:
intime, tisfloat, tislist, tistuple = _copytobuffer(tt)
else:
intime = None
# call pj_transform. inx,iny,inz buffers modified in place.
self._transformer._transform(
inx,
iny,
inz=inz,
intime=intime,
direction=direction,
radians=radians,
errcheck=errcheck,
)
# if inputs were lists, tuples or floats, convert back.
outx = _convertback(xisfloat, xislist, xistuple, inx)
outy = _convertback(yisfloat, yislist, xistuple, iny)
return_data = (outx, outy)
if inz is not None:
return_data += ( # type: ignore
_convertback(zisfloat, zislist, zistuple, inz),
)
if intime is not None:
return_data += ( # type: ignore
_convertback(tisfloat, tislist, tistuple, intime),
)
return return_data
|
def transform(
self,
xx: Any,
yy: Any,
zz: Any = None,
tt: Any = None,
radians: bool = False,
errcheck: bool = False,
direction: Union[TransformDirection, str] = TransformDirection.FORWARD,
) -> Any:
"""
Transform points between two coordinate systems.
.. versionadded:: 2.1.1 errcheck
.. versionadded:: 2.2.0 direction
Parameters
----------
xx: scalar or array (numpy or python)
Input x coordinate(s).
yy: scalar or array (numpy or python)
Input y coordinate(s).
zz: scalar or array (numpy or python), optional
Input z coordinate(s).
tt: scalar or array (numpy or python), optional
Input time coordinate(s).
radians: boolean, optional
If True, will expect input data to be in radians and will return radians
if the projection is geographic. Default is False (degrees). Ignored for
pipeline transformations.
errcheck: boolean, optional (default False)
If True an exception is raised if the transformation is invalid.
By default errcheck=False and an invalid transformation
returns ``inf`` and no exception is raised.
direction: pyproj.enums.TransformDirection, optional
The direction of the transform.
Default is :attr:`pyproj.enums.TransformDirection.FORWARD`.
Example:
>>> from pyproj import Transformer
>>> transformer = Transformer.from_crs("epsg:4326", "epsg:3857")
>>> x3, y3 = transformer.transform(33, 98)
>>> "%.3f %.3f" % (x3, y3)
'10909310.098 3895303.963'
>>> pipeline_str = (
... "+proj=pipeline +step +proj=longlat +ellps=WGS84 "
... "+step +proj=unitconvert +xy_in=rad +xy_out=deg"
... )
>>> pipe_trans = Transformer.from_pipeline(pipeline_str)
>>> xt, yt = pipe_trans.transform(2.1, 0.001)
>>> "%.3f %.3f" % (xt, yt)
'120.321 0.057'
>>> transproj = Transformer.from_crs(
... {"proj":'geocent', "ellps":'WGS84', "datum":'WGS84'},
... "EPSG:4326",
... always_xy=True,
... )
>>> xpj, ypj, zpj = transproj.transform(
... -2704026.010,
... -4253051.810,
... 3895878.820,
... radians=True,
... )
>>> "%.3f %.3f %.3f" % (xpj, ypj, zpj)
'-2.137 0.661 -20.531'
>>> transprojr = Transformer.from_crs(
... "EPSG:4326",
... {"proj":'geocent', "ellps":'WGS84', "datum":'WGS84'},
... always_xy=True,
... )
>>> xpjr, ypjr, zpjr = transprojr.transform(xpj, ypj, zpj, radians=True)
>>> "%.3f %.3f %.3f" % (xpjr, ypjr, zpjr)
'-2704026.010 -4253051.810 3895878.820'
>>> transformer = Transformer.from_proj("epsg:4326", 4326, skip_equivalent=True)
>>> xeq, yeq = transformer.transform(33, 98)
>>> "%.0f %.0f" % (xeq, yeq)
'33 98'
"""
# process inputs, making copies that support buffer API.
inx, xisfloat, xislist, xistuple = _copytobuffer(xx)
iny, yisfloat, yislist, yistuple = _copytobuffer(yy)
if zz is not None:
inz, zisfloat, zislist, zistuple = _copytobuffer(zz)
else:
inz = None
if tt is not None:
intime, tisfloat, tislist, tistuple = _copytobuffer(tt)
else:
intime = None
# call pj_transform. inx,iny,inz buffers modified in place.
self._transformer._transform(
inx,
iny,
inz=inz,
intime=intime,
direction=direction,
radians=radians,
errcheck=errcheck,
)
# if inputs were lists, tuples or floats, convert back.
outx = _convertback(xisfloat, xislist, xistuple, inx)
outy = _convertback(yisfloat, yislist, xistuple, iny)
return_data = (outx, outy)
if inz is not None:
return_data += ( # type: ignore
_convertback(zisfloat, zislist, zistuple, inz),
)
if intime is not None:
return_data += ( # type: ignore
_convertback(tisfloat, tislist, tistuple, intime),
)
return return_data
|
[{'piece_type': 'other', 'piece_content': 'echo 50 25 0 | cct +proj=pipeline +ellps=GRS80 +step +proj=cart'}, {'piece_type': 'other', 'piece_content': '3717892.6072 4430811.8715 2679074.4629 inf'}, {'piece_type': 'source code', 'piece_content': 'from pyproj import Transformer\\n\\nstring = "+proj=pipeline +ellps=GRS80 +step +proj=cart"\\npipe = Transformer.from_pipeline(string)\\npipe.transform(50, 25, 0, errcheck=True)'}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "<stdin>", line 1, in <module>\\nFile "/usr/local/lib/python3.7/site-packages/pyproj/transformer.py", line 446, in transform\\nerrcheck=errcheck,\\nFile "pyproj/_transformer.pyx", line 463, in pyproj._transformer._Transformer._transform\\npyproj.exceptions.ProjError: transform error: latitude or longitude exceeded limits'}]
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.7/site-packages/pyproj/transformer.py", line 446, in transform
errcheck=errcheck,
File "pyproj/_transformer.pyx", line 463, in pyproj._transformer._Transformer._transform
pyproj.exceptions.ProjError: transform error: latitude or longitude exceeded limits
|
pyproj.exceptions.ProjError
|
def itransform(
self,
points: Any,
switch: bool = False,
time_3rd: bool = False,
radians: bool = False,
errcheck: bool = False,
direction: Union[TransformDirection, str] = TransformDirection.FORWARD,
) -> Iterator[Iterable]:
"""
Iterator/generator version of the function pyproj.Transformer.transform.
.. versionadded:: 2.1.1 errcheck
.. versionadded:: 2.2.0 direction
Parameters
----------
points: list
List of point tuples.
switch: boolean, optional
If True x, y or lon,lat coordinates of points are switched to y, x
or lat, lon. Default is False.
time_3rd: boolean, optional
If the input coordinates are 3 dimensional and the 3rd dimension is time.
radians: boolean, optional
If True, will expect input data to be in radians and will return radians
if the projection is geographic. Default is False (degrees). Ignored for
pipeline transformations.
errcheck: boolean, optional (default False)
If True an exception is raised if the transformation is invalid.
By default errcheck=False and an invalid transformation
returns ``inf`` and no exception is raised.
direction: pyproj.enums.TransformDirection, optional
The direction of the transform.
Default is :attr:`pyproj.enums.TransformDirection.FORWARD`.
Example:
>>> from pyproj import Transformer
>>> transformer = Transformer.from_crs(4326, 2100)
>>> points = [(22.95, 40.63), (22.81, 40.53), (23.51, 40.86)]
>>> for pt in transformer.itransform(points): '{:.3f} {:.3f}'.format(*pt)
'2221638.801 2637034.372'
'2212924.125 2619851.898'
'2238294.779 2703763.736'
>>> pipeline_str = (
... "+proj=pipeline +step +proj=longlat +ellps=WGS84 "
... "+step +proj=unitconvert +xy_in=rad +xy_out=deg"
... )
>>> pipe_trans = Transformer.from_pipeline(pipeline_str)
>>> for pt in pipe_trans.itransform([(2.1, 0.001)]):
... '{:.3f} {:.3f}'.format(*pt)
'2.100 0.001'
>>> transproj = Transformer.from_crs(
... {"proj":'geocent', "ellps":'WGS84', "datum":'WGS84'},
... "EPSG:4326",
... always_xy=True,
... )
>>> for pt in transproj.itransform(
... [(-2704026.010, -4253051.810, 3895878.820)],
... radians=True,
... ):
... '{:.3f} {:.3f} {:.3f}'.format(*pt)
'-2.137 0.661 -20.531'
>>> transprojr = Transformer.from_crs(
... "EPSG:4326",
... {"proj":'geocent', "ellps":'WGS84', "datum":'WGS84'},
... always_xy=True,
... )
>>> for pt in transprojr.itransform(
... [(-2.137, 0.661, -20.531)],
... radians=True
... ):
... '{:.3f} {:.3f} {:.3f}'.format(*pt)
'-2704214.394 -4254414.478 3894270.731'
>>> transproj_eq = Transformer.from_proj(
... 'EPSG:4326',
... '+proj=longlat +datum=WGS84 +no_defs +type=crs',
... always_xy=True,
... skip_equivalent=True
... )
>>> for pt in transproj_eq.itransform([(-2.137, 0.661)]):
... '{:.3f} {:.3f}'.format(*pt)
'-2.137 0.661'
"""
it = iter(points) # point iterator
# get first point to check stride
try:
fst_pt = next(it)
except StopIteration:
raise ValueError("iterable must contain at least one point")
stride = len(fst_pt)
if stride not in (2, 3, 4):
raise ValueError("points can contain up to 4 coordinates")
if time_3rd and stride != 3:
raise ValueError("'time_3rd' is only valid for 3 coordinates.")
# create a coordinate sequence generator etc. x1,y1,z1,x2,y2,z2,....
# chain so the generator returns the first point that was already acquired
coord_gen = chain(fst_pt, (coords[c] for coords in it for c in range(stride)))
while True:
# create a temporary buffer storage for
# the next 64 points (64*stride*8 bytes)
buff = array("d", islice(coord_gen, 0, 64 * stride))
if len(buff) == 0:
break
self._transformer._transform_sequence(
stride,
buff,
switch=switch,
direction=direction,
time_3rd=time_3rd,
radians=radians,
errcheck=errcheck,
)
for pt in zip(*([iter(buff)] * stride)):
yield pt
|
def itransform(
self,
points: Any,
switch: bool = False,
time_3rd: bool = False,
radians: bool = False,
errcheck: bool = False,
direction: Union[TransformDirection, str] = TransformDirection.FORWARD,
) -> Iterator[Iterable]:
"""
Iterator/generator version of the function pyproj.Transformer.transform.
.. versionadded:: 2.1.1 errcheck
.. versionadded:: 2.2.0 direction
Parameters
----------
points: list
List of point tuples.
switch: boolean, optional
If True x, y or lon,lat coordinates of points are switched to y, x
or lat, lon. Default is False.
time_3rd: boolean, optional
If the input coordinates are 3 dimensional and the 3rd dimension is time.
radians: boolean, optional
If True, will expect input data to be in radians and will return radians
if the projection is geographic. Default is False (degrees). Ignored for
pipeline transformations.
errcheck: boolean, optional (default False)
If True an exception is raised if the transformation is invalid.
By default errcheck=False and an invalid transformation
returns ``inf`` and no exception is raised.
direction: pyproj.enums.TransformDirection, optional
The direction of the transform.
Default is :attr:`pyproj.enums.TransformDirection.FORWARD`.
Example:
>>> from pyproj import Transformer
>>> transformer = Transformer.from_crs(4326, 2100)
>>> points = [(22.95, 40.63), (22.81, 40.53), (23.51, 40.86)]
>>> for pt in transformer.itransform(points): '{:.3f} {:.3f}'.format(*pt)
'2221638.801 2637034.372'
'2212924.125 2619851.898'
'2238294.779 2703763.736'
>>> pipeline_str = (
... "+proj=pipeline +step +proj=longlat +ellps=WGS84 "
... "+step +proj=unitconvert +xy_in=rad +xy_out=deg"
... )
>>> pipe_trans = Transformer.from_pipeline(pipeline_str)
>>> for pt in pipe_trans.itransform([(2.1, 0.001)]):
... '{:.3f} {:.3f}'.format(*pt)
'120.321 0.057'
>>> transproj = Transformer.from_crs(
... {"proj":'geocent', "ellps":'WGS84', "datum":'WGS84'},
... "EPSG:4326",
... always_xy=True,
... )
>>> for pt in transproj.itransform(
... [(-2704026.010, -4253051.810, 3895878.820)],
... radians=True,
... ):
... '{:.3f} {:.3f} {:.3f}'.format(*pt)
'-2.137 0.661 -20.531'
>>> transprojr = Transformer.from_crs(
... "EPSG:4326",
... {"proj":'geocent', "ellps":'WGS84', "datum":'WGS84'},
... always_xy=True,
... )
>>> for pt in transprojr.itransform(
... [(-2.137, 0.661, -20.531)],
... radians=True
... ):
... '{:.3f} {:.3f} {:.3f}'.format(*pt)
'-2704214.394 -4254414.478 3894270.731'
>>> transproj_eq = Transformer.from_proj(
... 'EPSG:4326',
... '+proj=longlat +datum=WGS84 +no_defs +type=crs',
... always_xy=True,
... skip_equivalent=True
... )
>>> for pt in transproj_eq.itransform([(-2.137, 0.661)]):
... '{:.3f} {:.3f}'.format(*pt)
'-2.137 0.661'
"""
it = iter(points) # point iterator
# get first point to check stride
try:
fst_pt = next(it)
except StopIteration:
raise ValueError("iterable must contain at least one point")
stride = len(fst_pt)
if stride not in (2, 3, 4):
raise ValueError("points can contain up to 4 coordinates")
if time_3rd and stride != 3:
raise ValueError("'time_3rd' is only valid for 3 coordinates.")
# create a coordinate sequence generator etc. x1,y1,z1,x2,y2,z2,....
# chain so the generator returns the first point that was already acquired
coord_gen = chain(fst_pt, (coords[c] for coords in it for c in range(stride)))
while True:
# create a temporary buffer storage for
# the next 64 points (64*stride*8 bytes)
buff = array("d", islice(coord_gen, 0, 64 * stride))
if len(buff) == 0:
break
self._transformer._transform_sequence(
stride,
buff,
switch=switch,
direction=direction,
time_3rd=time_3rd,
radians=radians,
errcheck=errcheck,
)
for pt in zip(*([iter(buff)] * stride)):
yield pt
|
[{'piece_type': 'other', 'piece_content': 'echo 50 25 0 | cct +proj=pipeline +ellps=GRS80 +step +proj=cart'}, {'piece_type': 'other', 'piece_content': '3717892.6072 4430811.8715 2679074.4629 inf'}, {'piece_type': 'source code', 'piece_content': 'from pyproj import Transformer\\n\\nstring = "+proj=pipeline +ellps=GRS80 +step +proj=cart"\\npipe = Transformer.from_pipeline(string)\\npipe.transform(50, 25, 0, errcheck=True)'}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "<stdin>", line 1, in <module>\\nFile "/usr/local/lib/python3.7/site-packages/pyproj/transformer.py", line 446, in transform\\nerrcheck=errcheck,\\nFile "pyproj/_transformer.pyx", line 463, in pyproj._transformer._Transformer._transform\\npyproj.exceptions.ProjError: transform error: latitude or longitude exceeded limits'}]
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.7/site-packages/pyproj/transformer.py", line 446, in transform
errcheck=errcheck,
File "pyproj/_transformer.pyx", line 463, in pyproj._transformer._Transformer._transform
pyproj.exceptions.ProjError: transform error: latitude or longitude exceeded limits
|
pyproj.exceptions.ProjError
|
def from_user_input(value: Any) -> "CRS":
"""
Initialize a CRS class instance with:
- PROJ string
- Dictionary of PROJ parameters
- PROJ keyword arguments for parameters
- JSON string with PROJ parameters
- CRS WKT string
- An authority string [i.e. 'epsg:4326']
- An EPSG integer code [i.e. 4326]
- A tuple of ("auth_name": "auth_code") [i.e ('epsg', '4326')]
- An object with a `to_wkt` method.
- A :class:`pyproj.crs.CRS` class
Parameters
----------
value : obj
A Python int, dict, or str.
Returns
-------
CRS
"""
if isinstance(value, CRS):
return value
return CRS(value)
|
def from_user_input(value: str) -> "CRS":
"""
Initialize a CRS class instance with:
- PROJ string
- Dictionary of PROJ parameters
- PROJ keyword arguments for parameters
- JSON string with PROJ parameters
- CRS WKT string
- An authority string [i.e. 'epsg:4326']
- An EPSG integer code [i.e. 4326]
- A tuple of ("auth_name": "auth_code") [i.e ('epsg', '4326')]
- An object with a `to_wkt` method.
- A :class:`pyproj.crs.CRS` class
Parameters
----------
value : obj
A Python int, dict, or str.
Returns
-------
CRS
"""
if isinstance(value, CRS):
return value
return CRS(value)
|
[{'piece_type': 'error message', 'piece_content': 'import pyproj\\n\\n---------------------------------------------------------------------------\\nKeyError Traceback (most recent call last)\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()\\n\\nKeyError: \\'URN:OGC:DEF:DATUM:EPSG::6326\\'\\n\\nDuring handling of the above exception, another exception occurred:\\n\\nCRSError Traceback (most recent call last)\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()\\n\\nCRSError: Invalid datum name: urn:ogc:def:datum:EPSG::6326\\n\\nDuring handling of the above exception, another exception occurred:\\n\\nCRSError Traceback (most recent call last)\\n<ipython-input-1-98cb605ea9de> in <module>\\n----> 1 import pyproj\\n\\n~/scipy/repos/pyproj/pyproj/__init__.py in <module>\\n79 )\\n80 from pyproj._show_versions import show_versions # noqa: F401\\n---> 81 from pyproj.crs import CRS # noqa: F401\\n82 from pyproj.exceptions import DataDirError, ProjError # noqa: F401\\n83 from pyproj.geod import Geod, geodesic_version_str, pj_ellps # noqa: F401\\n\\n~/scipy/repos/pyproj/pyproj/crs/__init__.py in <module>\\n17 is_wkt,\\n18 )\\n---> 19 from pyproj.crs.crs import ( # noqa: F401\\n20 CRS,\\n21 BoundCRS,\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in <module>\\n1026\\n1027\\n-> 1028 class ProjectedCRS(CRS):\\n1029 """\\n1030 .. versionadded:: 2.5.0\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in ProjectedCRS()\\n1038 name="undefined",\\n1039 cartesian_cs=Cartesian2DCS(),\\n-> 1040 geodetic_crs=GeographicCRS(),\\n1041 ):\\n1042 """\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in __init__(self, name, datum, ellipsoidal_cs)\\n977 "type": "GeographicCRS",\\n978 "name": name,\\n--> 979 "datum": Datum.from_user_input(datum).to_json_dict(),\\n980 "coordinate_system": CoordinateSystem.from_user_input(\\n981 ellipsoidal_cs\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs._CRSParts.from_user_input()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum._from_string()\\n\\nCRSError: Invalid datum string: urn:ogc:def:datum:EPSG::6326: (Internal Proj Error: proj_create: SQLite error on SELECT name, ellipsoid_auth_name, ellipsoid_code, prime_meridian_auth_name, prime_meridian_code, area_of_use_auth_name, area_of_use_code, publication_date, deprecated FROM geodetic_datum WHERE auth_name = ? AND code = ?: no such column: publication_date)'}]
|
import pyproj
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()
KeyError: 'URN:OGC:DEF:DATUM:EPSG::6326'
During handling of the above exception, another exception occurred:
CRSError Traceback (most recent call last)
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()
CRSError: Invalid datum name: urn:ogc:def:datum:EPSG::6326
During handling of the above exception, another exception occurred:
CRSError Traceback (most recent call last)
<ipython-input-1-98cb605ea9de> in <module>
----> 1 import pyproj
~/scipy/repos/pyproj/pyproj/__init__.py in <module>
79 )
80 from pyproj._show_versions import show_versions # noqa: F401
---> 81 from pyproj.crs import CRS # noqa: F401
82 from pyproj.exceptions import DataDirError, ProjError # noqa: F401
83 from pyproj.geod import Geod, geodesic_version_str, pj_ellps # noqa: F401
~/scipy/repos/pyproj/pyproj/crs/__init__.py in <module>
17 is_wkt,
18 )
---> 19 from pyproj.crs.crs import ( # noqa: F401
20 CRS,
21 BoundCRS,
~/scipy/repos/pyproj/pyproj/crs/crs.py in <module>
1026
1027
-> 1028 class ProjectedCRS(CRS):
1029 """
1030 .. versionadded:: 2.5.0
~/scipy/repos/pyproj/pyproj/crs/crs.py in ProjectedCRS()
1038 name="undefined",
1039 cartesian_cs=Cartesian2DCS(),
-> 1040 geodetic_crs=GeographicCRS(),
1041 ):
1042 """
~/scipy/repos/pyproj/pyproj/crs/crs.py in __init__(self, name, datum, ellipsoidal_cs)
977 "type": "GeographicCRS",
978 "name": name,
--> 979 "datum": Datum.from_user_input(datum).to_json_dict(),
980 "coordinate_system": CoordinateSystem.from_user_input(
981 ellipsoidal_cs
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs._CRSParts.from_user_input()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum._from_string()
CRSError: Invalid datum string: urn:ogc:def:datum:EPSG::6326: (Internal Proj Error: proj_create: SQLite error on SELECT name, ellipsoid_auth_name, ellipsoid_code, prime_meridian_auth_name, prime_meridian_code, area_of_use_auth_name, area_of_use_code, publication_date, deprecated FROM geodetic_datum WHERE auth_name = ? AND code = ?: no such column: publication_date)
|
KeyError
|
def __init__(
self,
name: str = "undefined",
datum: Any = "urn:ogc:def:datum:EPSG::6326",
ellipsoidal_cs: Any = None,
) -> None:
"""
Parameters
----------
name: str, optional
Name of the CRS. Default is undefined.
datum: Any, optional
Anything accepted by :meth:`pyproj.crs.Datum.from_user_input` or
a :class:`pyproj.crs.datum.CustomDatum`.
ellipsoidal_cs: Any, optional
Input to create an Ellipsoidal Coordinate System.
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or an Ellipsoidal Coordinate System created from :ref:`coordinate_system`.
"""
geographic_crs_json = {
"$schema": "https://proj.org/schemas/v0.2/projjson.schema.json",
"type": "GeographicCRS",
"name": name,
"datum": Datum.from_user_input(datum).to_json_dict(),
"coordinate_system": CoordinateSystem.from_user_input(
ellipsoidal_cs or Ellipsoidal2DCS()
).to_json_dict(),
}
super().__init__(geographic_crs_json)
|
def __init__(
self,
name: str = "undefined",
datum: Any = "urn:ogc:def:datum:EPSG::6326",
ellipsoidal_cs: Any = Ellipsoidal2DCS(),
) -> None:
"""
Parameters
----------
name: str, optional
Name of the CRS. Default is undefined.
datum: Any, optional
Anything accepted by :meth:`pyproj.crs.Datum.from_user_input` or
a :class:`pyproj.crs.datum.CustomDatum`.
ellipsoidal_cs: Any, optional
Input to create an Ellipsoidal Coordinate System.
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or an Ellipsoidal Coordinate System created from :ref:`coordinate_system`.
"""
geographic_crs_json = {
"$schema": "https://proj.org/schemas/v0.2/projjson.schema.json",
"type": "GeographicCRS",
"name": name,
"datum": Datum.from_user_input(datum).to_json_dict(),
"coordinate_system": CoordinateSystem.from_user_input(
ellipsoidal_cs
).to_json_dict(),
}
super().__init__(geographic_crs_json)
|
[{'piece_type': 'error message', 'piece_content': 'import pyproj\\n\\n---------------------------------------------------------------------------\\nKeyError Traceback (most recent call last)\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()\\n\\nKeyError: \\'URN:OGC:DEF:DATUM:EPSG::6326\\'\\n\\nDuring handling of the above exception, another exception occurred:\\n\\nCRSError Traceback (most recent call last)\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()\\n\\nCRSError: Invalid datum name: urn:ogc:def:datum:EPSG::6326\\n\\nDuring handling of the above exception, another exception occurred:\\n\\nCRSError Traceback (most recent call last)\\n<ipython-input-1-98cb605ea9de> in <module>\\n----> 1 import pyproj\\n\\n~/scipy/repos/pyproj/pyproj/__init__.py in <module>\\n79 )\\n80 from pyproj._show_versions import show_versions # noqa: F401\\n---> 81 from pyproj.crs import CRS # noqa: F401\\n82 from pyproj.exceptions import DataDirError, ProjError # noqa: F401\\n83 from pyproj.geod import Geod, geodesic_version_str, pj_ellps # noqa: F401\\n\\n~/scipy/repos/pyproj/pyproj/crs/__init__.py in <module>\\n17 is_wkt,\\n18 )\\n---> 19 from pyproj.crs.crs import ( # noqa: F401\\n20 CRS,\\n21 BoundCRS,\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in <module>\\n1026\\n1027\\n-> 1028 class ProjectedCRS(CRS):\\n1029 """\\n1030 .. versionadded:: 2.5.0\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in ProjectedCRS()\\n1038 name="undefined",\\n1039 cartesian_cs=Cartesian2DCS(),\\n-> 1040 geodetic_crs=GeographicCRS(),\\n1041 ):\\n1042 """\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in __init__(self, name, datum, ellipsoidal_cs)\\n977 "type": "GeographicCRS",\\n978 "name": name,\\n--> 979 "datum": Datum.from_user_input(datum).to_json_dict(),\\n980 "coordinate_system": CoordinateSystem.from_user_input(\\n981 ellipsoidal_cs\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs._CRSParts.from_user_input()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum._from_string()\\n\\nCRSError: Invalid datum string: urn:ogc:def:datum:EPSG::6326: (Internal Proj Error: proj_create: SQLite error on SELECT name, ellipsoid_auth_name, ellipsoid_code, prime_meridian_auth_name, prime_meridian_code, area_of_use_auth_name, area_of_use_code, publication_date, deprecated FROM geodetic_datum WHERE auth_name = ? AND code = ?: no such column: publication_date)'}]
|
import pyproj
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()
KeyError: 'URN:OGC:DEF:DATUM:EPSG::6326'
During handling of the above exception, another exception occurred:
CRSError Traceback (most recent call last)
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()
CRSError: Invalid datum name: urn:ogc:def:datum:EPSG::6326
During handling of the above exception, another exception occurred:
CRSError Traceback (most recent call last)
<ipython-input-1-98cb605ea9de> in <module>
----> 1 import pyproj
~/scipy/repos/pyproj/pyproj/__init__.py in <module>
79 )
80 from pyproj._show_versions import show_versions # noqa: F401
---> 81 from pyproj.crs import CRS # noqa: F401
82 from pyproj.exceptions import DataDirError, ProjError # noqa: F401
83 from pyproj.geod import Geod, geodesic_version_str, pj_ellps # noqa: F401
~/scipy/repos/pyproj/pyproj/crs/__init__.py in <module>
17 is_wkt,
18 )
---> 19 from pyproj.crs.crs import ( # noqa: F401
20 CRS,
21 BoundCRS,
~/scipy/repos/pyproj/pyproj/crs/crs.py in <module>
1026
1027
-> 1028 class ProjectedCRS(CRS):
1029 """
1030 .. versionadded:: 2.5.0
~/scipy/repos/pyproj/pyproj/crs/crs.py in ProjectedCRS()
1038 name="undefined",
1039 cartesian_cs=Cartesian2DCS(),
-> 1040 geodetic_crs=GeographicCRS(),
1041 ):
1042 """
~/scipy/repos/pyproj/pyproj/crs/crs.py in __init__(self, name, datum, ellipsoidal_cs)
977 "type": "GeographicCRS",
978 "name": name,
--> 979 "datum": Datum.from_user_input(datum).to_json_dict(),
980 "coordinate_system": CoordinateSystem.from_user_input(
981 ellipsoidal_cs
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs._CRSParts.from_user_input()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum._from_string()
CRSError: Invalid datum string: urn:ogc:def:datum:EPSG::6326: (Internal Proj Error: proj_create: SQLite error on SELECT name, ellipsoid_auth_name, ellipsoid_code, prime_meridian_auth_name, prime_meridian_code, area_of_use_auth_name, area_of_use_code, publication_date, deprecated FROM geodetic_datum WHERE auth_name = ? AND code = ?: no such column: publication_date)
|
KeyError
|
def __init__(
self,
base_crs: Any,
conversion: Any,
ellipsoidal_cs: Any = None,
name: str = "undefined",
) -> None:
"""
Parameters
----------
base_crs: Any
Input to create the Geodetic CRS, a :class:`GeographicCRS` or
anything accepted by :meth:`pyproj.crs.CRS.from_user_input`.
conversion: Any
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or a conversion from :ref:`coordinate_operation`.
ellipsoidal_cs: Any, optional
Input to create an Ellipsoidal Coordinate System.
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or an Ellipsoidal Coordinate System created from :ref:`coordinate_system`.
name: str, optional
Name of the CRS. Default is undefined.
"""
derived_geographic_crs_json = {
"$schema": "https://proj.org/schemas/v0.2/projjson.schema.json",
"type": "DerivedGeographicCRS",
"name": name,
"base_crs": CRS.from_user_input(base_crs).to_json_dict(),
"conversion": CoordinateOperation.from_user_input(
conversion
).to_json_dict(),
"coordinate_system": CoordinateSystem.from_user_input(
ellipsoidal_cs or Ellipsoidal2DCS()
).to_json_dict(),
}
super().__init__(derived_geographic_crs_json)
|
def __init__(
self,
base_crs: Any,
conversion: Any,
ellipsoidal_cs: Any = Ellipsoidal2DCS(),
name: str = "undefined",
) -> None:
"""
Parameters
----------
base_crs: Any
Input to create the Geodetic CRS, a :class:`GeographicCRS` or
anything accepted by :meth:`pyproj.crs.CRS.from_user_input`.
conversion: Any
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or a conversion from :ref:`coordinate_operation`.
ellipsoidal_cs: Any, optional
Input to create an Ellipsoidal Coordinate System.
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or an Ellipsoidal Coordinate System created from :ref:`coordinate_system`.
name: str, optional
Name of the CRS. Default is undefined.
"""
derived_geographic_crs_json = {
"$schema": "https://proj.org/schemas/v0.2/projjson.schema.json",
"type": "DerivedGeographicCRS",
"name": name,
"base_crs": CRS.from_user_input(base_crs).to_json_dict(),
"conversion": CoordinateOperation.from_user_input(
conversion
).to_json_dict(),
"coordinate_system": CoordinateSystem.from_user_input(
ellipsoidal_cs
).to_json_dict(),
}
super().__init__(derived_geographic_crs_json)
|
[{'piece_type': 'error message', 'piece_content': 'import pyproj\\n\\n---------------------------------------------------------------------------\\nKeyError Traceback (most recent call last)\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()\\n\\nKeyError: \\'URN:OGC:DEF:DATUM:EPSG::6326\\'\\n\\nDuring handling of the above exception, another exception occurred:\\n\\nCRSError Traceback (most recent call last)\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()\\n\\nCRSError: Invalid datum name: urn:ogc:def:datum:EPSG::6326\\n\\nDuring handling of the above exception, another exception occurred:\\n\\nCRSError Traceback (most recent call last)\\n<ipython-input-1-98cb605ea9de> in <module>\\n----> 1 import pyproj\\n\\n~/scipy/repos/pyproj/pyproj/__init__.py in <module>\\n79 )\\n80 from pyproj._show_versions import show_versions # noqa: F401\\n---> 81 from pyproj.crs import CRS # noqa: F401\\n82 from pyproj.exceptions import DataDirError, ProjError # noqa: F401\\n83 from pyproj.geod import Geod, geodesic_version_str, pj_ellps # noqa: F401\\n\\n~/scipy/repos/pyproj/pyproj/crs/__init__.py in <module>\\n17 is_wkt,\\n18 )\\n---> 19 from pyproj.crs.crs import ( # noqa: F401\\n20 CRS,\\n21 BoundCRS,\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in <module>\\n1026\\n1027\\n-> 1028 class ProjectedCRS(CRS):\\n1029 """\\n1030 .. versionadded:: 2.5.0\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in ProjectedCRS()\\n1038 name="undefined",\\n1039 cartesian_cs=Cartesian2DCS(),\\n-> 1040 geodetic_crs=GeographicCRS(),\\n1041 ):\\n1042 """\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in __init__(self, name, datum, ellipsoidal_cs)\\n977 "type": "GeographicCRS",\\n978 "name": name,\\n--> 979 "datum": Datum.from_user_input(datum).to_json_dict(),\\n980 "coordinate_system": CoordinateSystem.from_user_input(\\n981 ellipsoidal_cs\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs._CRSParts.from_user_input()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum._from_string()\\n\\nCRSError: Invalid datum string: urn:ogc:def:datum:EPSG::6326: (Internal Proj Error: proj_create: SQLite error on SELECT name, ellipsoid_auth_name, ellipsoid_code, prime_meridian_auth_name, prime_meridian_code, area_of_use_auth_name, area_of_use_code, publication_date, deprecated FROM geodetic_datum WHERE auth_name = ? AND code = ?: no such column: publication_date)'}]
|
import pyproj
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()
KeyError: 'URN:OGC:DEF:DATUM:EPSG::6326'
During handling of the above exception, another exception occurred:
CRSError Traceback (most recent call last)
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()
CRSError: Invalid datum name: urn:ogc:def:datum:EPSG::6326
During handling of the above exception, another exception occurred:
CRSError Traceback (most recent call last)
<ipython-input-1-98cb605ea9de> in <module>
----> 1 import pyproj
~/scipy/repos/pyproj/pyproj/__init__.py in <module>
79 )
80 from pyproj._show_versions import show_versions # noqa: F401
---> 81 from pyproj.crs import CRS # noqa: F401
82 from pyproj.exceptions import DataDirError, ProjError # noqa: F401
83 from pyproj.geod import Geod, geodesic_version_str, pj_ellps # noqa: F401
~/scipy/repos/pyproj/pyproj/crs/__init__.py in <module>
17 is_wkt,
18 )
---> 19 from pyproj.crs.crs import ( # noqa: F401
20 CRS,
21 BoundCRS,
~/scipy/repos/pyproj/pyproj/crs/crs.py in <module>
1026
1027
-> 1028 class ProjectedCRS(CRS):
1029 """
1030 .. versionadded:: 2.5.0
~/scipy/repos/pyproj/pyproj/crs/crs.py in ProjectedCRS()
1038 name="undefined",
1039 cartesian_cs=Cartesian2DCS(),
-> 1040 geodetic_crs=GeographicCRS(),
1041 ):
1042 """
~/scipy/repos/pyproj/pyproj/crs/crs.py in __init__(self, name, datum, ellipsoidal_cs)
977 "type": "GeographicCRS",
978 "name": name,
--> 979 "datum": Datum.from_user_input(datum).to_json_dict(),
980 "coordinate_system": CoordinateSystem.from_user_input(
981 ellipsoidal_cs
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs._CRSParts.from_user_input()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum._from_string()
CRSError: Invalid datum string: urn:ogc:def:datum:EPSG::6326: (Internal Proj Error: proj_create: SQLite error on SELECT name, ellipsoid_auth_name, ellipsoid_code, prime_meridian_auth_name, prime_meridian_code, area_of_use_auth_name, area_of_use_code, publication_date, deprecated FROM geodetic_datum WHERE auth_name = ? AND code = ?: no such column: publication_date)
|
KeyError
|
def __init__(
self,
conversion: Any,
name: str = "undefined",
cartesian_cs: Any = None,
geodetic_crs: Any = None,
) -> None:
"""
Parameters
----------
conversion: Any
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or a conversion from :ref:`coordinate_operation`.
name: str, optional
The name of the Projected CRS. Default is undefined.
cartesian_cs: Any, optional
Input to create a Cartesian Coordinate System.
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or :class:`pyproj.crs.coordinate_system.Cartesian2DCS`.
geodetic_crs: Any, optional
Input to create the Geodetic CRS, a :class:`GeographicCRS` or
anything accepted by :meth:`pyproj.crs.CRS.from_user_input`.
"""
proj_crs_json = {
"$schema": "https://proj.org/schemas/v0.2/projjson.schema.json",
"type": "ProjectedCRS",
"name": name,
"base_crs": CRS.from_user_input(
geodetic_crs or GeographicCRS()
).to_json_dict(),
"conversion": CoordinateOperation.from_user_input(
conversion
).to_json_dict(),
"coordinate_system": CoordinateSystem.from_user_input(
cartesian_cs or Cartesian2DCS()
).to_json_dict(),
}
super().__init__(proj_crs_json)
|
def __init__(
self,
conversion: Any,
name: str = "undefined",
cartesian_cs: Any = Cartesian2DCS(),
geodetic_crs: Any = GeographicCRS(),
) -> None:
"""
Parameters
----------
conversion: Any
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or a conversion from :ref:`coordinate_operation`.
name: str, optional
The name of the Projected CRS. Default is undefined.
cartesian_cs: Any, optional
Input to create a Cartesian Coordinate System.
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or :class:`pyproj.crs.coordinate_system.Cartesian2DCS`.
geodetic_crs: Any, optional
Input to create the Geodetic CRS, a :class:`GeographicCRS` or
anything accepted by :meth:`pyproj.crs.CRS.from_user_input`.
"""
proj_crs_json = {
"$schema": "https://proj.org/schemas/v0.2/projjson.schema.json",
"type": "ProjectedCRS",
"name": name,
"base_crs": CRS.from_user_input(geodetic_crs).to_json_dict(),
"conversion": CoordinateOperation.from_user_input(
conversion
).to_json_dict(),
"coordinate_system": CoordinateSystem.from_user_input(
cartesian_cs
).to_json_dict(),
}
super().__init__(proj_crs_json)
|
[{'piece_type': 'error message', 'piece_content': 'import pyproj\\n\\n---------------------------------------------------------------------------\\nKeyError Traceback (most recent call last)\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()\\n\\nKeyError: \\'URN:OGC:DEF:DATUM:EPSG::6326\\'\\n\\nDuring handling of the above exception, another exception occurred:\\n\\nCRSError Traceback (most recent call last)\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()\\n\\nCRSError: Invalid datum name: urn:ogc:def:datum:EPSG::6326\\n\\nDuring handling of the above exception, another exception occurred:\\n\\nCRSError Traceback (most recent call last)\\n<ipython-input-1-98cb605ea9de> in <module>\\n----> 1 import pyproj\\n\\n~/scipy/repos/pyproj/pyproj/__init__.py in <module>\\n79 )\\n80 from pyproj._show_versions import show_versions # noqa: F401\\n---> 81 from pyproj.crs import CRS # noqa: F401\\n82 from pyproj.exceptions import DataDirError, ProjError # noqa: F401\\n83 from pyproj.geod import Geod, geodesic_version_str, pj_ellps # noqa: F401\\n\\n~/scipy/repos/pyproj/pyproj/crs/__init__.py in <module>\\n17 is_wkt,\\n18 )\\n---> 19 from pyproj.crs.crs import ( # noqa: F401\\n20 CRS,\\n21 BoundCRS,\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in <module>\\n1026\\n1027\\n-> 1028 class ProjectedCRS(CRS):\\n1029 """\\n1030 .. versionadded:: 2.5.0\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in ProjectedCRS()\\n1038 name="undefined",\\n1039 cartesian_cs=Cartesian2DCS(),\\n-> 1040 geodetic_crs=GeographicCRS(),\\n1041 ):\\n1042 """\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in __init__(self, name, datum, ellipsoidal_cs)\\n977 "type": "GeographicCRS",\\n978 "name": name,\\n--> 979 "datum": Datum.from_user_input(datum).to_json_dict(),\\n980 "coordinate_system": CoordinateSystem.from_user_input(\\n981 ellipsoidal_cs\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs._CRSParts.from_user_input()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum._from_string()\\n\\nCRSError: Invalid datum string: urn:ogc:def:datum:EPSG::6326: (Internal Proj Error: proj_create: SQLite error on SELECT name, ellipsoid_auth_name, ellipsoid_code, prime_meridian_auth_name, prime_meridian_code, area_of_use_auth_name, area_of_use_code, publication_date, deprecated FROM geodetic_datum WHERE auth_name = ? AND code = ?: no such column: publication_date)'}]
|
import pyproj
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()
KeyError: 'URN:OGC:DEF:DATUM:EPSG::6326'
During handling of the above exception, another exception occurred:
CRSError Traceback (most recent call last)
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()
CRSError: Invalid datum name: urn:ogc:def:datum:EPSG::6326
During handling of the above exception, another exception occurred:
CRSError Traceback (most recent call last)
<ipython-input-1-98cb605ea9de> in <module>
----> 1 import pyproj
~/scipy/repos/pyproj/pyproj/__init__.py in <module>
79 )
80 from pyproj._show_versions import show_versions # noqa: F401
---> 81 from pyproj.crs import CRS # noqa: F401
82 from pyproj.exceptions import DataDirError, ProjError # noqa: F401
83 from pyproj.geod import Geod, geodesic_version_str, pj_ellps # noqa: F401
~/scipy/repos/pyproj/pyproj/crs/__init__.py in <module>
17 is_wkt,
18 )
---> 19 from pyproj.crs.crs import ( # noqa: F401
20 CRS,
21 BoundCRS,
~/scipy/repos/pyproj/pyproj/crs/crs.py in <module>
1026
1027
-> 1028 class ProjectedCRS(CRS):
1029 """
1030 .. versionadded:: 2.5.0
~/scipy/repos/pyproj/pyproj/crs/crs.py in ProjectedCRS()
1038 name="undefined",
1039 cartesian_cs=Cartesian2DCS(),
-> 1040 geodetic_crs=GeographicCRS(),
1041 ):
1042 """
~/scipy/repos/pyproj/pyproj/crs/crs.py in __init__(self, name, datum, ellipsoidal_cs)
977 "type": "GeographicCRS",
978 "name": name,
--> 979 "datum": Datum.from_user_input(datum).to_json_dict(),
980 "coordinate_system": CoordinateSystem.from_user_input(
981 ellipsoidal_cs
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs._CRSParts.from_user_input()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum._from_string()
CRSError: Invalid datum string: urn:ogc:def:datum:EPSG::6326: (Internal Proj Error: proj_create: SQLite error on SELECT name, ellipsoid_auth_name, ellipsoid_code, prime_meridian_auth_name, prime_meridian_code, area_of_use_auth_name, area_of_use_code, publication_date, deprecated FROM geodetic_datum WHERE auth_name = ? AND code = ?: no such column: publication_date)
|
KeyError
|
def __init__(
self,
name: str,
datum: Any,
vertical_cs: Any = None,
geoid_model: Optional[str] = None,
) -> None:
"""
Parameters
----------
name: str
The name of the Vertical CRS (e.g. NAVD88 height).
datum: Any
Anything accepted by :meth:`pyproj.crs.Datum.from_user_input`
vertical_cs: Any, optional
Input to create a Vertical Coordinate System accepted by
:meth:`pyproj.crs.CoordinateSystem.from_user_input`
or :class:`pyproj.crs.coordinate_system.VerticalCS`
geoid_model: str, optional
The name of the GEOID Model (e.g. GEOID12B).
"""
vert_crs_json = {
"$schema": "https://proj.org/schemas/v0.2/projjson.schema.json",
"type": "VerticalCRS",
"name": name,
"datum": Datum.from_user_input(datum).to_json_dict(),
"coordinate_system": CoordinateSystem.from_user_input(
vertical_cs or VerticalCS()
).to_json_dict(),
}
if geoid_model is not None:
vert_crs_json["geoid_model"] = {"name": geoid_model}
super().__init__(vert_crs_json)
|
def __init__(
self,
name: str,
datum: Any,
vertical_cs: Any = VerticalCS(),
geoid_model: str = None,
) -> None:
"""
Parameters
----------
name: str
The name of the Vertical CRS (e.g. NAVD88 height).
datum: Any
Anything accepted by :meth:`pyproj.crs.Datum.from_user_input`
vertical_cs: Any, optional
Input to create a Vertical Coordinate System accepted by
:meth:`pyproj.crs.CoordinateSystem.from_user_input`
or :class:`pyproj.crs.coordinate_system.VerticalCS`
geoid_model: str, optional
The name of the GEOID Model (e.g. GEOID12B).
"""
vert_crs_json = {
"$schema": "https://proj.org/schemas/v0.2/projjson.schema.json",
"type": "VerticalCRS",
"name": name,
"datum": Datum.from_user_input(datum).to_json_dict(),
"coordinate_system": CoordinateSystem.from_user_input(
vertical_cs
).to_json_dict(),
}
if geoid_model is not None:
vert_crs_json["geoid_model"] = {"name": geoid_model}
super().__init__(vert_crs_json)
|
[{'piece_type': 'error message', 'piece_content': 'import pyproj\\n\\n---------------------------------------------------------------------------\\nKeyError Traceback (most recent call last)\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()\\n\\nKeyError: \\'URN:OGC:DEF:DATUM:EPSG::6326\\'\\n\\nDuring handling of the above exception, another exception occurred:\\n\\nCRSError Traceback (most recent call last)\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()\\n\\nCRSError: Invalid datum name: urn:ogc:def:datum:EPSG::6326\\n\\nDuring handling of the above exception, another exception occurred:\\n\\nCRSError Traceback (most recent call last)\\n<ipython-input-1-98cb605ea9de> in <module>\\n----> 1 import pyproj\\n\\n~/scipy/repos/pyproj/pyproj/__init__.py in <module>\\n79 )\\n80 from pyproj._show_versions import show_versions # noqa: F401\\n---> 81 from pyproj.crs import CRS # noqa: F401\\n82 from pyproj.exceptions import DataDirError, ProjError # noqa: F401\\n83 from pyproj.geod import Geod, geodesic_version_str, pj_ellps # noqa: F401\\n\\n~/scipy/repos/pyproj/pyproj/crs/__init__.py in <module>\\n17 is_wkt,\\n18 )\\n---> 19 from pyproj.crs.crs import ( # noqa: F401\\n20 CRS,\\n21 BoundCRS,\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in <module>\\n1026\\n1027\\n-> 1028 class ProjectedCRS(CRS):\\n1029 """\\n1030 .. versionadded:: 2.5.0\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in ProjectedCRS()\\n1038 name="undefined",\\n1039 cartesian_cs=Cartesian2DCS(),\\n-> 1040 geodetic_crs=GeographicCRS(),\\n1041 ):\\n1042 """\\n\\n~/scipy/repos/pyproj/pyproj/crs/crs.py in __init__(self, name, datum, ellipsoidal_cs)\\n977 "type": "GeographicCRS",\\n978 "name": name,\\n--> 979 "datum": Datum.from_user_input(datum).to_json_dict(),\\n980 "coordinate_system": CoordinateSystem.from_user_input(\\n981 ellipsoidal_cs\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs._CRSParts.from_user_input()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()\\n\\n~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum._from_string()\\n\\nCRSError: Invalid datum string: urn:ogc:def:datum:EPSG::6326: (Internal Proj Error: proj_create: SQLite error on SELECT name, ellipsoid_auth_name, ellipsoid_code, prime_meridian_auth_name, prime_meridian_code, area_of_use_auth_name, area_of_use_code, publication_date, deprecated FROM geodetic_datum WHERE auth_name = ? AND code = ?: no such column: publication_date)'}]
|
import pyproj
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()
KeyError: 'URN:OGC:DEF:DATUM:EPSG::6326'
During handling of the above exception, another exception occurred:
CRSError Traceback (most recent call last)
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_name()
CRSError: Invalid datum name: urn:ogc:def:datum:EPSG::6326
During handling of the above exception, another exception occurred:
CRSError Traceback (most recent call last)
<ipython-input-1-98cb605ea9de> in <module>
----> 1 import pyproj
~/scipy/repos/pyproj/pyproj/__init__.py in <module>
79 )
80 from pyproj._show_versions import show_versions # noqa: F401
---> 81 from pyproj.crs import CRS # noqa: F401
82 from pyproj.exceptions import DataDirError, ProjError # noqa: F401
83 from pyproj.geod import Geod, geodesic_version_str, pj_ellps # noqa: F401
~/scipy/repos/pyproj/pyproj/crs/__init__.py in <module>
17 is_wkt,
18 )
---> 19 from pyproj.crs.crs import ( # noqa: F401
20 CRS,
21 BoundCRS,
~/scipy/repos/pyproj/pyproj/crs/crs.py in <module>
1026
1027
-> 1028 class ProjectedCRS(CRS):
1029 """
1030 .. versionadded:: 2.5.0
~/scipy/repos/pyproj/pyproj/crs/crs.py in ProjectedCRS()
1038 name="undefined",
1039 cartesian_cs=Cartesian2DCS(),
-> 1040 geodetic_crs=GeographicCRS(),
1041 ):
1042 """
~/scipy/repos/pyproj/pyproj/crs/crs.py in __init__(self, name, datum, ellipsoidal_cs)
977 "type": "GeographicCRS",
978 "name": name,
--> 979 "datum": Datum.from_user_input(datum).to_json_dict(),
980 "coordinate_system": CoordinateSystem.from_user_input(
981 ellipsoidal_cs
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs._CRSParts.from_user_input()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum.from_string()
~/scipy/repos/pyproj/pyproj/_crs.pyx in pyproj._crs.Datum._from_string()
CRSError: Invalid datum string: urn:ogc:def:datum:EPSG::6326: (Internal Proj Error: proj_create: SQLite error on SELECT name, ellipsoid_auth_name, ellipsoid_code, prime_meridian_auth_name, prime_meridian_code, area_of_use_auth_name, area_of_use_code, publication_date, deprecated FROM geodetic_datum WHERE auth_name = ? AND code = ?: no such column: publication_date)
|
KeyError
|
def set_data_dir(proj_data_dir):
"""
Set the data directory for PROJ to use.
Parameters
----------
proj_data_dir: str
The path to rhe PROJ data directory.
"""
global _USER_PROJ_DATA
_USER_PROJ_DATA = proj_data_dir
# reset search paths
from pyproj._datadir import PYPROJ_CONTEXT
PYPROJ_CONTEXT.set_search_paths(reset=True)
|
def set_data_dir(proj_data_dir):
"""
Set the data directory for PROJ to use.
Parameters
----------
proj_data_dir: str
The path to rhe PROJ data directory.
"""
global _USER_PROJ_DATA
_USER_PROJ_DATA = proj_data_dir
# reset search paths
from pyproj._datadir import PYPROJ_CONTEXT
PYPROJ_CONTEXT.set_search_paths()
|
[{'piece_type': 'other', 'piece_content': 'Fatal Python error: Segmentation fault\\n\\nCurrent thread 0x00007fa0f79c4700 (most recent call first):\\nFile "/opt/conda/lib/python3.7/site-packages/pyproj/crs.py", line 303 in __init__\\nFile "/opt/conda/lib/python3.7/site-packages/pyproj/crs.py", line 434 in from_user_input\\nFile "/opt/conda/lib/python3.7/site-packages/pyproj/proj.py", line 145 in __init__\\nFile "/opt/conda/lib/python3.7/site-packages/geopandas/geoseries.py", line 304 in to_crs\\nFile "/opt/conda/lib/python3.7/site-packages/geopandas/geodataframe.py", line 459 in to_crs\\n...\\nFile "<stdin>", line 1 in <module>\\nSegmentation fault (core dumped)'}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "<stdin>", line 1, in <module>\\n...\\nFile "/opt/conda/lib/python3.7/site-packages/geopandas/geodataframe.py", line 459, in to_crs\\ngeom = df.geometry.to_crs(crs=crs, epsg=epsg)\\nFile "/opt/conda/lib/python3.7/site-packages/geopandas/geoseries.py", line 304, in to_crs\\nproj_in = pyproj.Proj(self.crs, preserve_units=True)\\nFile "/opt/conda/lib/python3.7/site-packages/pyproj/proj.py", line 147, in __init__\\nself.crs = CRS.from_user_input(projparams if projparams is not None else kwargs)\\nFile "/opt/conda/lib/python3.7/site-packages/pyproj/crs.py", line 435, in from_user_input\\nreturn cls(value)\\nFile "/opt/conda/lib/python3.7/site-packages/pyproj/crs.py", line 304, in __init__\\nsuper(CRS, self).__init__(projstring)\\nFile "pyproj/_crs.pyx", line 1308, in pyproj._crs._CRS.__init__\\nFile "pyproj/_datadir.pyx", line 18, in pyproj._datadir.get_pyproj_context\\nFile "/opt/conda/lib/python3.7/site-packages/pyproj/datadir.py", line 99, in get_data_dir\\n"Valid PROJ data directory not found. "\\npyproj.exceptions.DataDirError: Valid PROJ data directory not found. Either set the path using the environmental variable PROJ_LIB or with `pyproj.datadir.set_data_dir`.'}, {'piece_type': 'other', 'piece_content': 'System:\\npython: 3.7.3 | packaged by conda-forge | (default, Jul 1 2019, 21:52:21) [GCC 7.3.0]\\nexecutable: /usr/bin/condapy\\nmachine: Linux-4.15.0-1037-gcp-x86_64-with-debian-stretch-sid\\n\\nPROJ:\\nPROJ: 6.1.1\\ndata dir: None\\n\\nPython deps:\\npyproj: 2.2.2\\npip: 19.2.3\\nsetuptools: 41.2.0\\nCython: None\\naenum: None'}, {'piece_type': 'other', 'piece_content': 'System:\\npython: 3.7.3 | packaged by conda-forge | (default, Jul 1 2019, 21:52:21) [GCC 7.3.0]\\nexecutable: /usr/bin/condapy\\nmachine: Linux-4.15.0-1037-gcp-x86_64-with-debian-stretch-sid\\n\\nPROJ:\\nPROJ: 6.1.1\\ndata dir: /opt/conda/share/proj\\n\\nPython deps:\\npyproj: 2.3.0\\npip: 19.2.3\\nsetuptools: 41.2.0\\nCython: None'}, {'piece_type': 'other', 'piece_content': 'wget --quiet https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \\\\\\n/bin/bash ~/miniconda.sh -b -p /opt/conda && \\\\\\nconda update conda -y && \\\\\\nconda config --add channels conda-forge && \\\\\\nconda config --set channel_priority strict && \\\\\\nconda install \\\\\\ngeopandas numexpr bottleneck'}, {'piece_type': 'other', 'piece_content': 'proj4 6.1.1 hc80f0dc_1 conda-forge\\npyproj 2.3.0 py37h2fd02e8_0 conda-forge'}, {'piece_type': 'other', 'piece_content': 'active environment : None\\nuser config file : /root/.condarc\\npopulated config files : /root/.condarc\\nconda version : 4.7.11\\nconda-build version : not installed\\npython version : 3.7.3.final.0\\nvirtual packages :\\nbase environment : /opt/conda (writable)\\nchannel URLs : https://conda.anaconda.org/conda-forge/linux-64\\nhttps://conda.anaconda.org/conda-forge/noarch\\nhttps://repo.anaconda.com/pkgs/main/linux-64\\nhttps://repo.anaconda.com/pkgs/main/noarch\\nhttps://repo.anaconda.com/pkgs/r/linux-64\\nhttps://repo.anaconda.com/pkgs/r/noarch\\npackage cache : /opt/conda/pkgs\\n/root/.conda/pkgs\\nenvs directories : /opt/conda/envs\\n/root/.conda/envs\\nplatform : linux-64\\nuser-agent : conda/4.7.11 requests/2.22.0 CPython/3.7.3 Linux/4.15.0-1037-gcp ubuntu/16.04.6 glibc/2.23\\nUID:GID : 0:0\\nnetrc file : None\\noffline mode : False'}]
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
...
File "/opt/conda/lib/python3.7/site-packages/geopandas/geodataframe.py", line 459, in to_crs
geom = df.geometry.to_crs(crs=crs, epsg=epsg)
File "/opt/conda/lib/python3.7/site-packages/geopandas/geoseries.py", line 304, in to_crs
proj_in = pyproj.Proj(self.crs, preserve_units=True)
File "/opt/conda/lib/python3.7/site-packages/pyproj/proj.py", line 147, in __init__
self.crs = CRS.from_user_input(projparams if projparams is not None else kwargs)
File "/opt/conda/lib/python3.7/site-packages/pyproj/crs.py", line 435, in from_user_input
return cls(value)
File "/opt/conda/lib/python3.7/site-packages/pyproj/crs.py", line 304, in __init__
super(CRS, self).__init__(projstring)
File "pyproj/_crs.pyx", line 1308, in pyproj._crs._CRS.__init__
File "pyproj/_datadir.pyx", line 18, in pyproj._datadir.get_pyproj_context
File "/opt/conda/lib/python3.7/site-packages/pyproj/datadir.py", line 99, in get_data_dir
"Valid PROJ data directory not found. "
pyproj.exceptions.DataDirError: Valid PROJ data directory not found. Either set the path using the environmental variable PROJ_LIB or with `pyproj.datadir.set_data_dir`.
|
pyproj.exceptions.DataDirError
|
def set_data_dir(proj_data_dir):
"""
Set the data directory for PROJ to use.
Parameters
----------
proj_data_dir: str
The path to rhe PROJ data directory.
"""
global _USER_PROJ_DATA
_USER_PROJ_DATA = proj_data_dir
# reset search paths
from pyproj._datadir import PYPROJ_CONTEXT
PYPROJ_CONTEXT.set_search_paths()
|
def set_data_dir(proj_data_dir):
"""
Set the data directory for PROJ to use.
Parameters
----------
proj_data_dir: str
The path to rhe PROJ data directory.
"""
global _USER_PROJ_DATA
global _VALIDATED_PROJ_DATA
_USER_PROJ_DATA = proj_data_dir
# set to none to re-validate
_VALIDATED_PROJ_DATA = None
|
[{'piece_type': 'error message', 'piece_content': '97%|█████████████████████████████████▊ | 88243/91210 [00:26<00:00, 6190.94it/s]\\nCRSs instantiated: 507\\nCRSs instantiated (cache hits included): 88603\\nTransformers instantiated: 502\\nTransformers instantiated (cache hits included): 88389\\n---------------------------------------------------------------------------\\nProjError Traceback (most recent call last)\\n... <snip> ...\\n~/.local/share/virtualenvs/bug-Ew6sNC7W/lib/python3.7/site-packages/pyproj/transformer.py in from_proj(proj_from, proj_to, skip_equivalent, always_xy)\\n\\npyproj/_transformer.pyx in pyproj._transformer._Transformer.from_crs()\\n\\nProjError: Error creating CRS to CRS.: (Internal Proj Error: proj_create: no dat\\nabase context specified)\\n\\nIn [2]:\\nDo you really want to exit ([y]/n)?\\n\\nError in atexit._run_exitfuncs:\\nTraceback (most recent call last):\\nFile "/home/elias/.local/share/virtualenvs/bug-Ew6sNC7W/lib/python3.7/site-packages/IPython/core/history.py", line 578, in end_session\\nsqlite3.OperationalError: unable to open database file'}, {'piece_type': 'source code', 'piece_content': 'import pyproj, tqdm\\nUSE_CACHE = True # without USE_CACHE, this is painfully slow.\\nwkt_cache = {}; transformer_cache = {}\\n# Keep track of the number of invocations:\\ntransforms = transforms_with_cache = 0\\ncrss_created = crss_created_with_cache = 0\\n\\ndef get_crs(code):\\nglobal crss_created\\nif code in wkt_cache: return wkt_cache[code]\\ntry: crs = pyproj.CRS.from_authority(\\'esri\\', code)\\nexcept: crs = pyproj.CRS.from_epsg(code)\\nif USE_CACHE: wkt_cache[code] = crs\\ncrss_created += 1\\nreturn crs\\n\\n# lines = [next(open(\\'wkts.txt\\', \\'rt\\'))] * 200_000 # This does not trigger the bug\\nlines = open(\\'wkts.txt\\', \\'rt\\').readlines()\\nproj_wgs84 = pyproj.Proj("+init=epsg:4326")\\n\\ndef main(lines):\\nglobal crss_created, crss_created_with_cache, transforms_with_cache, transforms\\nfor line in tqdm.tqdm(lines):\\ntry:\\nkey = wkid = int(line.strip())\\ncrs = get_crs(wkid)\\nexcept ValueError:\\nkey = wkt = line.strip()\\nif wkt in wkt_cache:\\ncrs = wkt_cache[wkt]\\nelse:\\ncrs = wkt_cache[wkt] = pyproj.CRS.from_wkt(wkt)\\ncrss_created += 1\\n\\ncrss_created_with_cache += 1\\ntry:\\nif USE_CACHE and key in transformer_cache:\\nt = transformer_cache[key]\\nelse:\\nt = transformer_cache[key] = pyproj.Transformer.from_proj(crs, proj_wgs84)\\ntransforms += 1\\ntransforms_with_cache += 1\\nexcept Exception as ex:\\nif \\'Input is not a transformation\\' not in str(ex): raise\\n\\ntry:\\nmain(lines)\\nfinally:\\nprint(\\'CRSs instantiated:\\', crss_created)\\nprint(\\'CRSs instantiated (cache hits included):\\', crss_created_with_cache)\\nprint(\\'Transformers instantiated:\\', transforms)\\nprint(\\'Transformers instantiated (cache hits included):\\', transforms_with_cache)'}, {'piece_type': 'other', 'piece_content': 'System:\\npython: 3.7.3 (default, Apr 3 2019, 19:16:38) [GCC 8.0.1 20180414 (experimental) [trunk revision 259383]]\\nexecutable: /home/elias/.local/share/virtualenvs/bug-Ew6sNC7W/bin/python\\nmachine: Linux-4.15.0-54-generic-x86_64-with-Ubuntu-18.04-bionic\\n\\nPROJ:\\nPROJ: 6.1.0\\ndata dir: /home/elias/.local/share/virtualenvs/bug-Ew6sNC7W/lib/python3.7/site-packages/pyproj/proj_dir/share/proj\\n\\nPython deps:\\npyproj: 2.2.1\\npip: 19.1.1\\nsetuptools: 41.0.1\\nCython: None\\naenum: None```\\n\\nconda:'}, {'piece_type': 'other', 'piece_content': "#### Installation method\\n- conda, pip wheel, from source, etc...\\n\\n#### Conda environment information (if you installed with conda):\\n\\n<br/>\\nNB: conda environment was created with: conda install -c conda-forge 'pyproj>2.2' numpy\\nEnvironment (<code>conda list</code>):\\n<details>"}, {'piece_type': 'other', 'piece_content': '</details>\\n\\n<br/>\\nDetails about <code>conda</code> and system ( <code>conda info</code> ):\\n<details>'}, {'piece_type': 'other', 'piece_content': '</details>'}]
|
97%|█████████████████████████████████▊ | 88243/91210 [00:26<00:00, 6190.94it/s]
CRSs instantiated: 507
CRSs instantiated (cache hits included): 88603
Transformers instantiated: 502
Transformers instantiated (cache hits included): 88389
---------------------------------------------------------------------------
ProjError Traceback (most recent call last)
... <snip> ...
~/.local/share/virtualenvs/bug-Ew6sNC7W/lib/python3.7/site-packages/pyproj/transformer.py in from_proj(proj_from, proj_to, skip_equivalent, always_xy)
pyproj/_transformer.pyx in pyproj._transformer._Transformer.from_crs()
ProjError: Error creating CRS to CRS.: (Internal Proj Error: proj_create: no dat
abase context specified)
In [2]:
Do you really want to exit ([y]/n)?
Error in atexit._run_exitfuncs:
Traceback (most recent call last):
File "/home/elias/.local/share/virtualenvs/bug-Ew6sNC7W/lib/python3.7/site-packages/IPython/core/history.py", line 578, in end_session
sqlite3.OperationalError: unable to open database file
|
ProjError
|
def get_data_dir():
"""
The order of preference for the data directory is:
1. The one set by pyproj.datadir.set_data_dir (if exists & valid)
2. The internal proj directory (if exists & valid)
3. The directory in PROJ_LIB (if exists & valid)
4. The directory on the PATH (if exists & valid)
Returns
-------
str: The valid data directory.
"""
global _USER_PROJ_DATA
internal_datadir = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "proj_dir", "share", "proj"
)
proj_lib_dirs = os.environ.get("PROJ_LIB", "")
def valid_data_dir(potential_data_dir):
if potential_data_dir is not None and os.path.exists(
os.path.join(potential_data_dir, "proj.db")
):
return True
return False
def valid_data_dirs(potential_data_dirs):
if potential_data_dirs is None:
return False
for proj_data_dir in potential_data_dirs.split(os.pathsep):
if valid_data_dir(proj_data_dir):
return True
break
return None
validated_proj_data = None
if valid_data_dirs(_USER_PROJ_DATA):
validated_proj_data = _USER_PROJ_DATA
elif valid_data_dir(internal_datadir):
validated_proj_data = internal_datadir
elif valid_data_dirs(proj_lib_dirs):
validated_proj_data = proj_lib_dirs
else:
proj_exe = find_executable("proj")
if proj_exe is not None:
system_proj_dir = os.path.join(
os.path.dirname(os.path.dirname(proj_exe)), "share", "proj"
)
if valid_data_dir(system_proj_dir):
validated_proj_data = system_proj_dir
if validated_proj_data is None:
raise DataDirError(
"Valid PROJ data directory not found. "
"Either set the path using the environmental variable PROJ_LIB or "
"with `pyproj.datadir.set_data_dir`."
)
return validated_proj_data
|
def get_data_dir():
"""
The order of preference for the data directory is:
1. The one set by pyproj.datadir.set_data_dir (if exists & valid)
2. The internal proj directory (if exists & valid)
3. The directory in PROJ_LIB (if exists & valid)
4. The directory on the PATH (if exists & valid)
Returns
-------
str: The valid data directory.
"""
# to avoid re-validating
global _VALIDATED_PROJ_DATA
if _VALIDATED_PROJ_DATA is not None:
return _VALIDATED_PROJ_DATA
global _USER_PROJ_DATA
internal_datadir = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "proj_dir", "share", "proj"
)
proj_lib_dirs = os.environ.get("PROJ_LIB", "")
def valid_data_dir(potential_data_dir):
if potential_data_dir is not None and os.path.exists(
os.path.join(potential_data_dir, "proj.db")
):
return True
return False
def valid_data_dirs(potential_data_dirs):
if potential_data_dirs is None:
return False
for proj_data_dir in potential_data_dirs.split(os.pathsep):
if valid_data_dir(proj_data_dir):
return True
break
return None
if valid_data_dirs(_USER_PROJ_DATA):
_VALIDATED_PROJ_DATA = _USER_PROJ_DATA
elif valid_data_dir(internal_datadir):
_VALIDATED_PROJ_DATA = internal_datadir
elif valid_data_dirs(proj_lib_dirs):
_VALIDATED_PROJ_DATA = proj_lib_dirs
else:
proj_exe = find_executable("proj")
if proj_exe is not None:
system_proj_dir = os.path.join(
os.path.dirname(os.path.dirname(proj_exe)), "share", "proj"
)
if valid_data_dir(system_proj_dir):
_VALIDATED_PROJ_DATA = system_proj_dir
if _VALIDATED_PROJ_DATA is None:
raise DataDirError(
"Valid PROJ data directory not found. "
"Either set the path using the environmental variable PROJ_LIB or "
"with `pyproj.datadir.set_data_dir`."
)
return _VALIDATED_PROJ_DATA
|
[{'piece_type': 'error message', 'piece_content': '97%|█████████████████████████████████▊ | 88243/91210 [00:26<00:00, 6190.94it/s]\\nCRSs instantiated: 507\\nCRSs instantiated (cache hits included): 88603\\nTransformers instantiated: 502\\nTransformers instantiated (cache hits included): 88389\\n---------------------------------------------------------------------------\\nProjError Traceback (most recent call last)\\n... <snip> ...\\n~/.local/share/virtualenvs/bug-Ew6sNC7W/lib/python3.7/site-packages/pyproj/transformer.py in from_proj(proj_from, proj_to, skip_equivalent, always_xy)\\n\\npyproj/_transformer.pyx in pyproj._transformer._Transformer.from_crs()\\n\\nProjError: Error creating CRS to CRS.: (Internal Proj Error: proj_create: no dat\\nabase context specified)\\n\\nIn [2]:\\nDo you really want to exit ([y]/n)?\\n\\nError in atexit._run_exitfuncs:\\nTraceback (most recent call last):\\nFile "/home/elias/.local/share/virtualenvs/bug-Ew6sNC7W/lib/python3.7/site-packages/IPython/core/history.py", line 578, in end_session\\nsqlite3.OperationalError: unable to open database file'}, {'piece_type': 'source code', 'piece_content': 'import pyproj, tqdm\\nUSE_CACHE = True # without USE_CACHE, this is painfully slow.\\nwkt_cache = {}; transformer_cache = {}\\n# Keep track of the number of invocations:\\ntransforms = transforms_with_cache = 0\\ncrss_created = crss_created_with_cache = 0\\n\\ndef get_crs(code):\\nglobal crss_created\\nif code in wkt_cache: return wkt_cache[code]\\ntry: crs = pyproj.CRS.from_authority(\\'esri\\', code)\\nexcept: crs = pyproj.CRS.from_epsg(code)\\nif USE_CACHE: wkt_cache[code] = crs\\ncrss_created += 1\\nreturn crs\\n\\n# lines = [next(open(\\'wkts.txt\\', \\'rt\\'))] * 200_000 # This does not trigger the bug\\nlines = open(\\'wkts.txt\\', \\'rt\\').readlines()\\nproj_wgs84 = pyproj.Proj("+init=epsg:4326")\\n\\ndef main(lines):\\nglobal crss_created, crss_created_with_cache, transforms_with_cache, transforms\\nfor line in tqdm.tqdm(lines):\\ntry:\\nkey = wkid = int(line.strip())\\ncrs = get_crs(wkid)\\nexcept ValueError:\\nkey = wkt = line.strip()\\nif wkt in wkt_cache:\\ncrs = wkt_cache[wkt]\\nelse:\\ncrs = wkt_cache[wkt] = pyproj.CRS.from_wkt(wkt)\\ncrss_created += 1\\n\\ncrss_created_with_cache += 1\\ntry:\\nif USE_CACHE and key in transformer_cache:\\nt = transformer_cache[key]\\nelse:\\nt = transformer_cache[key] = pyproj.Transformer.from_proj(crs, proj_wgs84)\\ntransforms += 1\\ntransforms_with_cache += 1\\nexcept Exception as ex:\\nif \\'Input is not a transformation\\' not in str(ex): raise\\n\\ntry:\\nmain(lines)\\nfinally:\\nprint(\\'CRSs instantiated:\\', crss_created)\\nprint(\\'CRSs instantiated (cache hits included):\\', crss_created_with_cache)\\nprint(\\'Transformers instantiated:\\', transforms)\\nprint(\\'Transformers instantiated (cache hits included):\\', transforms_with_cache)'}, {'piece_type': 'other', 'piece_content': 'System:\\npython: 3.7.3 (default, Apr 3 2019, 19:16:38) [GCC 8.0.1 20180414 (experimental) [trunk revision 259383]]\\nexecutable: /home/elias/.local/share/virtualenvs/bug-Ew6sNC7W/bin/python\\nmachine: Linux-4.15.0-54-generic-x86_64-with-Ubuntu-18.04-bionic\\n\\nPROJ:\\nPROJ: 6.1.0\\ndata dir: /home/elias/.local/share/virtualenvs/bug-Ew6sNC7W/lib/python3.7/site-packages/pyproj/proj_dir/share/proj\\n\\nPython deps:\\npyproj: 2.2.1\\npip: 19.1.1\\nsetuptools: 41.0.1\\nCython: None\\naenum: None```\\n\\nconda:'}, {'piece_type': 'other', 'piece_content': "#### Installation method\\n- conda, pip wheel, from source, etc...\\n\\n#### Conda environment information (if you installed with conda):\\n\\n<br/>\\nNB: conda environment was created with: conda install -c conda-forge 'pyproj>2.2' numpy\\nEnvironment (<code>conda list</code>):\\n<details>"}, {'piece_type': 'other', 'piece_content': '</details>\\n\\n<br/>\\nDetails about <code>conda</code> and system ( <code>conda info</code> ):\\n<details>'}, {'piece_type': 'other', 'piece_content': '</details>'}]
|
97%|█████████████████████████████████▊ | 88243/91210 [00:26<00:00, 6190.94it/s]
CRSs instantiated: 507
CRSs instantiated (cache hits included): 88603
Transformers instantiated: 502
Transformers instantiated (cache hits included): 88389
---------------------------------------------------------------------------
ProjError Traceback (most recent call last)
... <snip> ...
~/.local/share/virtualenvs/bug-Ew6sNC7W/lib/python3.7/site-packages/pyproj/transformer.py in from_proj(proj_from, proj_to, skip_equivalent, always_xy)
pyproj/_transformer.pyx in pyproj._transformer._Transformer.from_crs()
ProjError: Error creating CRS to CRS.: (Internal Proj Error: proj_create: no dat
abase context specified)
In [2]:
Do you really want to exit ([y]/n)?
Error in atexit._run_exitfuncs:
Traceback (most recent call last):
File "/home/elias/.local/share/virtualenvs/bug-Ew6sNC7W/lib/python3.7/site-packages/IPython/core/history.py", line 578, in end_session
sqlite3.OperationalError: unable to open database file
|
ProjError
|
def from_proj(proj_from, proj_to, skip_equivalent=False, always_xy=False):
"""Make a Transformer from a :obj:`~pyproj.proj.Proj` or input used to create one.
Parameters
----------
proj_from: :obj:`~pyproj.proj.Proj` or input used to create one
Projection of input data.
proj_to: :obj:`~pyproj.proj.Proj` or input used to create one
Projection of output data.
skip_equivalent: bool, optional
If true, will skip the transformation operation if input and output
projections are equivalent. Default is false.
always_xy: bool, optional
If true, the transform method will accept as input and return as output
coordinates using the traditional GIS order, that is longitude, latitude
for geographic CRS and easting, northing for most projected CRS.
Default is false.
Returns
-------
:obj:`~Transformer`
"""
if not isinstance(proj_from, Proj):
proj_from = Proj(proj_from)
if not isinstance(proj_to, Proj):
proj_to = Proj(proj_to)
return Transformer(
_Transformer.from_crs(
proj_from.crs,
proj_to.crs,
skip_equivalent=skip_equivalent,
always_xy=always_xy,
)
)
|
def from_proj(proj_from, proj_to, skip_equivalent=False, always_xy=False):
"""Make a Transformer from a :obj:`~pyproj.proj.Proj` or input used to create one.
Parameters
----------
proj_from: :obj:`~pyproj.proj.Proj` or input used to create one
Projection of input data.
proj_to: :obj:`~pyproj.proj.Proj` or input used to create one
Projection of output data.
skip_equivalent: bool, optional
If true, will skip the transformation operation if input and output
projections are equivalent. Default is false.
always_xy: bool, optional
If true, the transform method will accept as input and return as output
coordinates using the traditional GIS order, that is longitude, latitude
for geographic CRS and easting, northing for most projected CRS.
Default is false.
Returns
-------
:obj:`~Transformer`
"""
if not isinstance(proj_from, Proj):
proj_from = Proj(proj_from)
if not isinstance(proj_to, Proj):
proj_to = Proj(proj_to)
transformer = Transformer()
transformer._transformer = _Transformer.from_crs(
proj_from.crs,
proj_to.crs,
skip_equivalent=skip_equivalent,
always_xy=always_xy,
)
return transformer
|
[{'piece_type': 'error message', 'piece_content': "In [4]: t = pyproj.Transformer()\\n\\nIn [5]: t\\nOut[5]: <pyproj.transformer.Transformer at 0x7fd75ff9b860>\\n\\nIn [6]: t.transform(0, 0)\\n---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-6-65405fa99360> in <module>\\n----> 1 t.transform(0, 0)\\n\\n~/scipy/repos/pyproj/pyproj/transformer.py in transform(self, xx, yy, zz, tt, radians, errcheck, direction)\\n207 intime = None\\n208 # call pj_transform. inx,iny,inz buffers modified in place.\\n--> 209 self._transformer._transform(\\n210 inx,\\n211 iny,\\n\\nAttributeError: 'Transformer' object has no attribute '_transformer'"}]
|
In [4]: t = pyproj.Transformer()
In [5]: t
Out[5]: <pyproj.transformer.Transformer at 0x7fd75ff9b860>
In [6]: t.transform(0, 0)
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-6-65405fa99360> in <module>
----> 1 t.transform(0, 0)
~/scipy/repos/pyproj/pyproj/transformer.py in transform(self, xx, yy, zz, tt, radians, errcheck, direction)
207 intime = None
208 # call pj_transform. inx,iny,inz buffers modified in place.
--> 209 self._transformer._transform(
210 inx,
211 iny,
AttributeError: 'Transformer' object has no attribute '_transformer'
|
AttributeError
|
def from_crs(crs_from, crs_to, skip_equivalent=False, always_xy=False):
"""Make a Transformer from a :obj:`~pyproj.crs.CRS` or input used to create one.
Parameters
----------
crs_from: ~pyproj.crs.CRS or input used to create one
Projection of input data.
crs_to: ~pyproj.crs.CRS or input used to create one
Projection of output data.
skip_equivalent: bool, optional
If true, will skip the transformation operation if input and output
projections are equivalent. Default is false.
always_xy: bool, optional
If true, the transform method will accept as input and return as output
coordinates using the traditional GIS order, that is longitude, latitude
for geographic CRS and easting, northing for most projected CRS.
Default is false.
Returns
-------
:obj:`~Transformer`
"""
transformer = Transformer(
_Transformer.from_crs(
CRS.from_user_input(crs_from),
CRS.from_user_input(crs_to),
skip_equivalent=skip_equivalent,
always_xy=always_xy,
)
)
return transformer
|
def from_crs(crs_from, crs_to, skip_equivalent=False, always_xy=False):
"""Make a Transformer from a :obj:`~pyproj.crs.CRS` or input used to create one.
Parameters
----------
crs_from: ~pyproj.crs.CRS or input used to create one
Projection of input data.
crs_to: ~pyproj.crs.CRS or input used to create one
Projection of output data.
skip_equivalent: bool, optional
If true, will skip the transformation operation if input and output
projections are equivalent. Default is false.
always_xy: bool, optional
If true, the transform method will accept as input and return as output
coordinates using the traditional GIS order, that is longitude, latitude
for geographic CRS and easting, northing for most projected CRS.
Default is false.
Returns
-------
:obj:`~Transformer`
"""
transformer = Transformer()
transformer._transformer = _Transformer.from_crs(
CRS.from_user_input(crs_from),
CRS.from_user_input(crs_to),
skip_equivalent=skip_equivalent,
always_xy=always_xy,
)
return transformer
|
[{'piece_type': 'error message', 'piece_content': "In [4]: t = pyproj.Transformer()\\n\\nIn [5]: t\\nOut[5]: <pyproj.transformer.Transformer at 0x7fd75ff9b860>\\n\\nIn [6]: t.transform(0, 0)\\n---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-6-65405fa99360> in <module>\\n----> 1 t.transform(0, 0)\\n\\n~/scipy/repos/pyproj/pyproj/transformer.py in transform(self, xx, yy, zz, tt, radians, errcheck, direction)\\n207 intime = None\\n208 # call pj_transform. inx,iny,inz buffers modified in place.\\n--> 209 self._transformer._transform(\\n210 inx,\\n211 iny,\\n\\nAttributeError: 'Transformer' object has no attribute '_transformer'"}]
|
In [4]: t = pyproj.Transformer()
In [5]: t
Out[5]: <pyproj.transformer.Transformer at 0x7fd75ff9b860>
In [6]: t.transform(0, 0)
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-6-65405fa99360> in <module>
----> 1 t.transform(0, 0)
~/scipy/repos/pyproj/pyproj/transformer.py in transform(self, xx, yy, zz, tt, radians, errcheck, direction)
207 intime = None
208 # call pj_transform. inx,iny,inz buffers modified in place.
--> 209 self._transformer._transform(
210 inx,
211 iny,
AttributeError: 'Transformer' object has no attribute '_transformer'
|
AttributeError
|
def from_pipeline(proj_pipeline):
"""Make a Transformer from a PROJ pipeline string.
https://proj4.org/operations/pipeline.html
Parameters
----------
proj_pipeline: str
Projection pipeline string.
Returns
-------
~Transformer
"""
return Transformer(_Transformer.from_pipeline(cstrencode(proj_pipeline)))
|
def from_pipeline(proj_pipeline):
"""Make a Transformer from a PROJ pipeline string.
https://proj4.org/operations/pipeline.html
Parameters
----------
proj_pipeline: str
Projection pipeline string.
Returns
-------
~Transformer
"""
transformer = Transformer()
transformer._transformer = _Transformer.from_pipeline(cstrencode(proj_pipeline))
return transformer
|
[{'piece_type': 'error message', 'piece_content': "In [4]: t = pyproj.Transformer()\\n\\nIn [5]: t\\nOut[5]: <pyproj.transformer.Transformer at 0x7fd75ff9b860>\\n\\nIn [6]: t.transform(0, 0)\\n---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-6-65405fa99360> in <module>\\n----> 1 t.transform(0, 0)\\n\\n~/scipy/repos/pyproj/pyproj/transformer.py in transform(self, xx, yy, zz, tt, radians, errcheck, direction)\\n207 intime = None\\n208 # call pj_transform. inx,iny,inz buffers modified in place.\\n--> 209 self._transformer._transform(\\n210 inx,\\n211 iny,\\n\\nAttributeError: 'Transformer' object has no attribute '_transformer'"}]
|
In [4]: t = pyproj.Transformer()
In [5]: t
Out[5]: <pyproj.transformer.Transformer at 0x7fd75ff9b860>
In [6]: t.transform(0, 0)
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-6-65405fa99360> in <module>
----> 1 t.transform(0, 0)
~/scipy/repos/pyproj/pyproj/transformer.py in transform(self, xx, yy, zz, tt, radians, errcheck, direction)
207 intime = None
208 # call pj_transform. inx,iny,inz buffers modified in place.
--> 209 self._transformer._transform(
210 inx,
211 iny,
AttributeError: 'Transformer' object has no attribute '_transformer'
|
AttributeError
|
def _dict2string(projparams):
# convert a dict to a proj4 string.
pjargs = []
proj_inserted = False
for key, value in projparams.items():
# the towgs84 as list
if isinstance(value, (list, tuple)):
value = ",".join([str(val) for val in value])
# issue 183 (+ no_rot)
if value is None or value is True:
pjargs.append("+{key}".format(key=key))
elif value is False:
pass
# make sure string starts with proj or init
elif not proj_inserted and key in ("init", "proj"):
pjargs.insert(0, "+{key}={value}".format(key=key, value=value))
proj_inserted = True
else:
pjargs.append("+{key}={value}".format(key=key, value=value))
return " ".join(pjargs)
|
def _dict2string(projparams):
# convert a dict to a proj4 string.
pjargs = []
for key, value in projparams.items():
# the towgs84 as list
if isinstance(value, (list, tuple)):
value = ",".join([str(val) for val in value])
# issue 183 (+ no_rot)
if value is None or value is True:
pjargs.append("+" + key + " ")
elif value is False:
pass
else:
pjargs.append("+" + key + "=" + str(value) + " ")
return "".join(pjargs)
|
[{'piece_type': 'error message', 'piece_content': 'from pyproj import Proj\\nProj({\\'a\\': 6371229.0, \\'b\\': 6371229.0, \\'lon_0\\': -10.0, \\'o_lat_p\\': 30.0, \\'o_lon_p\\': 0.0, \\'o_proj\\': \\'longlat\\', \\'proj\\'\\n: \\'ob_tran\\'})\\nTraceback (most recent call last):\\nFile "<stdin>", line 1, in <module>\\nFile ".../lib/python3.7/site-packages/pyproj/proj.py", line 303, in __init__\\ncstrencode(self.crs.to_proj4().replace("+type=crs", "").strip())\\nAttributeError: \\'NoneType\\' object has no attribute \\'replace\\''}]
|
from pyproj import Proj
Proj({'a': 6371229.0, 'b': 6371229.0, 'lon_0': -10.0, 'o_lat_p': 30.0, 'o_lon_p': 0.0, 'o_proj': 'longlat', 'proj'
: 'ob_tran'})
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File ".../lib/python3.7/site-packages/pyproj/proj.py", line 303, in __init__
cstrencode(self.crs.to_proj4().replace("+type=crs", "").strip())
AttributeError: 'NoneType' object has no attribute 'replace'
|
AttributeError
|
def __init__(self, projparams=None, preserve_units=True, **kwargs):
"""
initialize a Proj class instance.
See the proj documentation (https://github.com/OSGeo/proj.4/wiki)
for more information about projection parameters.
Parameters
----------
projparams: int, str, dict, pyproj.CRS
A proj.4 or WKT string, proj.4 dict, EPSG integer, or a pyproj.CRS instnace.
preserve_units: bool
If false, will ensure +units=m.
**kwargs:
proj.4 projection parameters.
Example usage:
>>> from pyproj import Proj
>>> p = Proj(proj='utm',zone=10,ellps='WGS84', preserve_units=False) # use kwargs
>>> x,y = p(-120.108, 34.36116666)
>>> 'x=%9.3f y=%11.3f' % (x,y)
'x=765975.641 y=3805993.134'
>>> 'lon=%8.3f lat=%5.3f' % p(x,y,inverse=True)
'lon=-120.108 lat=34.361'
>>> # do 3 cities at a time in a tuple (Fresno, LA, SF)
>>> lons = (-119.72,-118.40,-122.38)
>>> lats = (36.77, 33.93, 37.62 )
>>> x,y = p(lons, lats)
>>> 'x: %9.3f %9.3f %9.3f' % x
'x: 792763.863 925321.537 554714.301'
>>> 'y: %9.3f %9.3f %9.3f' % y
'y: 4074377.617 3763936.941 4163835.303'
>>> lons, lats = p(x, y, inverse=True) # inverse transform
>>> 'lons: %8.3f %8.3f %8.3f' % lons
'lons: -119.720 -118.400 -122.380'
>>> 'lats: %8.3f %8.3f %8.3f' % lats
'lats: 36.770 33.930 37.620'
>>> p2 = Proj('+proj=utm +zone=10 +ellps=WGS84', preserve_units=False) # use proj4 string
>>> x,y = p2(-120.108, 34.36116666)
>>> 'x=%9.3f y=%11.3f' % (x,y)
'x=765975.641 y=3805993.134'
>>> p = Proj(init="epsg:32667", preserve_units=False)
>>> 'x=%12.3f y=%12.3f (meters)' % p(-114.057222, 51.045)
'x=-1783506.250 y= 6193827.033 (meters)'
>>> p = Proj("+init=epsg:32667")
>>> 'x=%12.3f y=%12.3f (feet)' % p(-114.057222, 51.045)
'x=-5851386.754 y=20320914.191 (feet)'
>>> # test data with radian inputs
>>> p1 = Proj(init="epsg:4214")
>>> x1, y1 = p1(116.366, 39.867)
>>> '{:.3f} {:.3f}'.format(x1, y1)
'2.031 0.696'
>>> x2, y2 = p1(x1, y1, inverse=True)
>>> '{:.3f} {:.3f}'.format(x2, y2)
'116.366 39.867'
"""
self.crs = CRS.from_user_input(projparams if projparams is not None else kwargs)
# make sure units are meters if preserve_units is False.
if not preserve_units and "foot" in self.crs.axis_info[0].unit_name:
projstring = self.crs.to_proj4(4)
projstring = re.sub(r"\\s\\+units=[\\w-]+", "", projstring)
projstring += " +units=m"
self.crs = CRS(projstring)
super(Proj, self).__init__(
cstrencode(
(self.crs.to_proj4() or self.crs.srs).replace("+type=crs", "").strip()
)
)
|
def __init__(self, projparams=None, preserve_units=True, **kwargs):
"""
initialize a Proj class instance.
See the proj documentation (https://github.com/OSGeo/proj.4/wiki)
for more information about projection parameters.
Parameters
----------
projparams: int, str, dict, pyproj.CRS
A proj.4 or WKT string, proj.4 dict, EPSG integer, or a pyproj.CRS instnace.
preserve_units: bool
If false, will ensure +units=m.
**kwargs:
proj.4 projection parameters.
Example usage:
>>> from pyproj import Proj
>>> p = Proj(proj='utm',zone=10,ellps='WGS84', preserve_units=False) # use kwargs
>>> x,y = p(-120.108, 34.36116666)
>>> 'x=%9.3f y=%11.3f' % (x,y)
'x=765975.641 y=3805993.134'
>>> 'lon=%8.3f lat=%5.3f' % p(x,y,inverse=True)
'lon=-120.108 lat=34.361'
>>> # do 3 cities at a time in a tuple (Fresno, LA, SF)
>>> lons = (-119.72,-118.40,-122.38)
>>> lats = (36.77, 33.93, 37.62 )
>>> x,y = p(lons, lats)
>>> 'x: %9.3f %9.3f %9.3f' % x
'x: 792763.863 925321.537 554714.301'
>>> 'y: %9.3f %9.3f %9.3f' % y
'y: 4074377.617 3763936.941 4163835.303'
>>> lons, lats = p(x, y, inverse=True) # inverse transform
>>> 'lons: %8.3f %8.3f %8.3f' % lons
'lons: -119.720 -118.400 -122.380'
>>> 'lats: %8.3f %8.3f %8.3f' % lats
'lats: 36.770 33.930 37.620'
>>> p2 = Proj('+proj=utm +zone=10 +ellps=WGS84', preserve_units=False) # use proj4 string
>>> x,y = p2(-120.108, 34.36116666)
>>> 'x=%9.3f y=%11.3f' % (x,y)
'x=765975.641 y=3805993.134'
>>> p = Proj(init="epsg:32667", preserve_units=False)
>>> 'x=%12.3f y=%12.3f (meters)' % p(-114.057222, 51.045)
'x=-1783506.250 y= 6193827.033 (meters)'
>>> p = Proj("+init=epsg:32667")
>>> 'x=%12.3f y=%12.3f (feet)' % p(-114.057222, 51.045)
'x=-5851386.754 y=20320914.191 (feet)'
>>> # test data with radian inputs
>>> p1 = Proj(init="epsg:4214")
>>> x1, y1 = p1(116.366, 39.867)
>>> '{:.3f} {:.3f}'.format(x1, y1)
'2.031 0.696'
>>> x2, y2 = p1(x1, y1, inverse=True)
>>> '{:.3f} {:.3f}'.format(x2, y2)
'116.366 39.867'
"""
self.crs = CRS.from_user_input(projparams if projparams is not None else kwargs)
# make sure units are meters if preserve_units is False.
if not preserve_units and "foot" in self.crs.axis_info[0].unit_name:
projstring = self.crs.to_proj4(4)
projstring = re.sub(r"\\s\\+units=[\\w-]+", "", projstring)
projstring += " +units=m"
self.crs = CRS(projstring)
super(Proj, self).__init__(
cstrencode(self.crs.to_proj4().replace("+type=crs", "").strip())
)
|
[{'piece_type': 'error message', 'piece_content': 'from pyproj import Proj\\nProj({\\'a\\': 6371229.0, \\'b\\': 6371229.0, \\'lon_0\\': -10.0, \\'o_lat_p\\': 30.0, \\'o_lon_p\\': 0.0, \\'o_proj\\': \\'longlat\\', \\'proj\\'\\n: \\'ob_tran\\'})\\nTraceback (most recent call last):\\nFile "<stdin>", line 1, in <module>\\nFile ".../lib/python3.7/site-packages/pyproj/proj.py", line 303, in __init__\\ncstrencode(self.crs.to_proj4().replace("+type=crs", "").strip())\\nAttributeError: \\'NoneType\\' object has no attribute \\'replace\\''}]
|
from pyproj import Proj
Proj({'a': 6371229.0, 'b': 6371229.0, 'lon_0': -10.0, 'o_lat_p': 30.0, 'o_lon_p': 0.0, 'o_proj': 'longlat', 'proj'
: 'ob_tran'})
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File ".../lib/python3.7/site-packages/pyproj/proj.py", line 303, in __init__
cstrencode(self.crs.to_proj4().replace("+type=crs", "").strip())
AttributeError: 'NoneType' object has no attribute 'replace'
|
AttributeError
|
def Kuf_conv_patch(inducing_variable, kernel, Xnew):
Xp = kernel.get_patches(Xnew) # [N, num_patches, patch_len]
bigKzx = kernel.base_kernel.K(
inducing_variable.Z, Xp
) # [M, N, P] -- thanks to broadcasting of kernels
Kzx = tf.reduce_sum(bigKzx * kernel.weights if hasattr(kernel, "weights") else bigKzx, [2])
return Kzx / kernel.num_patches
|
def Kuf_conv_patch(feat, kern, Xnew):
Xp = kern.get_patches(Xnew) # [N, num_patches, patch_len]
bigKzx = kern.base_kernel.K(feat.Z, Xp) # [M, N, P] -- thanks to broadcasting of kernels
Kzx = tf.reduce_sum(bigKzx * kern.weights if hasattr(kern, "weights") else bigKzx, [2])
return Kzx / kern.num_patches
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def Kuu_kernel_inducingpoints(inducing_variable: InducingPoints, kernel: Kernel, *, jitter=0.0):
Kzz = kernel(inducing_variable.Z)
Kzz += jitter * tf.eye(inducing_variable.num_inducing, dtype=Kzz.dtype)
return Kzz
|
def Kuu_kernel_inducingpoints(inducing_variable: InducingPoints, kernel: Kernel, *, jitter=0.0):
Kzz = kernel(inducing_variable.Z)
Kzz += jitter * tf.eye(len(inducing_variable), dtype=Kzz.dtype)
return Kzz
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def Kuu_sqexp_multiscale(inducing_variable: Multiscale, kernel: SquaredExponential, *, jitter=0.0):
Zmu, Zlen = kernel.slice(inducing_variable.Z, inducing_variable.scales)
idlengthscales2 = tf.square(kernel.lengthscales + Zlen)
sc = tf.sqrt(
idlengthscales2[None, ...] + idlengthscales2[:, None, ...] - kernel.lengthscales ** 2
)
d = inducing_variable._cust_square_dist(Zmu, Zmu, sc)
Kzz = kernel.variance * tf.exp(-d / 2) * tf.reduce_prod(kernel.lengthscales / sc, 2)
Kzz += jitter * tf.eye(inducing_variable.num_inducing, dtype=Kzz.dtype)
return Kzz
|
def Kuu_sqexp_multiscale(inducing_variable: Multiscale, kernel: SquaredExponential, *, jitter=0.0):
Zmu, Zlen = kernel.slice(inducing_variable.Z, inducing_variable.scales)
idlengthscales2 = tf.square(kernel.lengthscales + Zlen)
sc = tf.sqrt(
idlengthscales2[None, ...] + idlengthscales2[:, None, ...] - kernel.lengthscales ** 2
)
d = inducing_variable._cust_square_dist(Zmu, Zmu, sc)
Kzz = kernel.variance * tf.exp(-d / 2) * tf.reduce_prod(kernel.lengthscales / sc, 2)
Kzz += jitter * tf.eye(len(inducing_variable), dtype=Kzz.dtype)
return Kzz
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def Kuu_conv_patch(inducing_variable, kernel, jitter=0.0):
return kernel.base_kernel.K(inducing_variable.Z) + jitter * tf.eye(
inducing_variable.num_inducing, dtype=default_float()
)
|
def Kuu_conv_patch(feat, kern, jitter=0.0):
return kern.base_kernel.K(feat.Z) + jitter * tf.eye(len(feat), dtype=default_float())
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def _Kuu(
inducing_variable: FallbackSeparateIndependentInducingVariables,
kernel: Union[SeparateIndependent, LinearCoregionalization],
*,
jitter=0.0,
):
Kmms = [Kuu(f, k) for f, k in zip(inducing_variable.inducing_variable_list, kernel.kernels)]
Kmm = tf.stack(Kmms, axis=0) # [L, M, M]
jittermat = tf.eye(inducing_variable.num_inducing, dtype=Kmm.dtype)[None, :, :] * jitter
return Kmm + jittermat
|
def _Kuu(
inducing_variable: FallbackSeparateIndependentInducingVariables,
kernel: Union[SeparateIndependent, LinearCoregionalization],
*,
jitter=0.0,
):
Kmms = [Kuu(f, k) for f, k in zip(inducing_variable.inducing_variable_list, kernel.kernels)]
Kmm = tf.stack(Kmms, axis=0) # [L, M, M]
jittermat = tf.eye(len(inducing_variable), dtype=Kmm.dtype)[None, :, :] * jitter
return Kmm + jittermat
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def __init__(self, Z: TensorData, name: Optional[str] = None):
"""
:param Z: the initial positions of the inducing points, size [M, D]
"""
super().__init__(name=name)
if not isinstance(Z, (tf.Variable, tfp.util.TransformedVariable)):
Z = Parameter(Z)
self.Z = Z
|
def __init__(self, Z: TensorData, name: Optional[str] = None):
"""
:param Z: the initial positions of the inducing points, size [M, D]
"""
super().__init__(name=name)
self.Z = Parameter(Z, dtype=default_float())
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def __len__(self) -> int:
return tf.shape(self.Z)[0]
|
def __len__(self) -> int:
return self.Z.shape[0]
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def __len__(self) -> int:
return self.inducing_variable.num_inducing
|
def __len__(self) -> int:
return len(self.inducing_variable)
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def __len__(self) -> int:
# TODO(st--) we should check that they all have the same length...
return self.inducing_variable_list[0].num_inducing
|
def __len__(self) -> int:
return len(self.inducing_variable_list[0])
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def __init__(
self,
distribution_class: Type[tfp.distributions.Distribution] = tfp.distributions.Normal,
scale_transform: Optional[tfp.bijectors.Bijector] = None,
**kwargs,
):
"""
:param distribution_class: distribution class parameterized by `loc` and `scale`
as first and second argument, respectively.
:param scale_transform: callable/bijector applied to the latent
function modelling the scale to ensure its positivity.
Typically, `tf.exp` or `tf.softplus`, but can be any function f: R -> R^+. Defaults to exp if not explicitly specified.
"""
if scale_transform is None:
scale_transform = positive(base="exp")
self.scale_transform = scale_transform
def conditional_distribution(Fs) -> tfp.distributions.Distribution:
tf.debugging.assert_equal(tf.shape(Fs)[-1], 2)
loc = Fs[..., :1]
scale = self.scale_transform(Fs[..., 1:])
return distribution_class(loc, scale)
super().__init__(
latent_dim=2, conditional_distribution=conditional_distribution, **kwargs,
)
|
def __init__(
self,
distribution_class: Type[tfp.distributions.Distribution] = tfp.distributions.Normal,
scale_transform: tfp.bijectors.Bijector = positive(base="exp"),
**kwargs,
):
"""
:param distribution_class: distribution class parameterized by `loc` and `scale`
as first and second argument, respectively.
:param scale_transform: callable/bijector applied to the latent
function modelling the scale to ensure its positivity.
Typically, `tf.exp` or `tf.softplus`, but can be any function f: R -> R^+.
"""
def conditional_distribution(Fs) -> tfp.distributions.Distribution:
tf.debugging.assert_equal(tf.shape(Fs)[-1], 2)
loc = Fs[..., :1]
scale = scale_transform(Fs[..., 1:])
return distribution_class(loc, scale)
super().__init__(
latent_dim=2, conditional_distribution=conditional_distribution, **kwargs,
)
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def conditional_distribution(Fs) -> tfp.distributions.Distribution:
tf.debugging.assert_equal(tf.shape(Fs)[-1], 2)
loc = Fs[..., :1]
scale = self.scale_transform(Fs[..., 1:])
return distribution_class(loc, scale)
|
def conditional_distribution(Fs) -> tfp.distributions.Distribution:
tf.debugging.assert_equal(tf.shape(Fs)[-1], 2)
loc = Fs[..., :1]
scale = scale_transform(Fs[..., 1:])
return distribution_class(loc, scale)
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def elbo(self) -> tf.Tensor:
"""
Construct a tensorflow function to compute the bound on the marginal
likelihood.
"""
Y_data = self.data
pX = DiagonalGaussian(self.X_data_mean, self.X_data_var)
num_inducing = self.inducing_variable.num_inducing
psi0 = tf.reduce_sum(expectation(pX, self.kernel))
psi1 = expectation(pX, (self.kernel, self.inducing_variable))
psi2 = tf.reduce_sum(
expectation(
pX, (self.kernel, self.inducing_variable), (self.kernel, self.inducing_variable)
),
axis=0,
)
cov_uu = covariances.Kuu(self.inducing_variable, self.kernel, jitter=default_jitter())
L = tf.linalg.cholesky(cov_uu)
sigma2 = self.likelihood.variance
sigma = tf.sqrt(sigma2)
# Compute intermediate matrices
A = tf.linalg.triangular_solve(L, tf.transpose(psi1), lower=True) / sigma
tmp = tf.linalg.triangular_solve(L, psi2, lower=True)
AAT = tf.linalg.triangular_solve(L, tf.transpose(tmp), lower=True) / sigma2
B = AAT + tf.eye(num_inducing, dtype=default_float())
LB = tf.linalg.cholesky(B)
log_det_B = 2.0 * tf.reduce_sum(tf.math.log(tf.linalg.diag_part(LB)))
c = tf.linalg.triangular_solve(LB, tf.linalg.matmul(A, Y_data), lower=True) / sigma
# KL[q(x) || p(x)]
dX_data_var = (
self.X_data_var
if self.X_data_var.shape.ndims == 2
else tf.linalg.diag_part(self.X_data_var)
)
NQ = to_default_float(tf.size(self.X_data_mean))
D = to_default_float(tf.shape(Y_data)[1])
KL = -0.5 * tf.reduce_sum(tf.math.log(dX_data_var))
KL += 0.5 * tf.reduce_sum(tf.math.log(self.X_prior_var))
KL -= 0.5 * NQ
KL += 0.5 * tf.reduce_sum(
(tf.square(self.X_data_mean - self.X_prior_mean) + dX_data_var) / self.X_prior_var
)
# compute log marginal bound
ND = to_default_float(tf.size(Y_data))
bound = -0.5 * ND * tf.math.log(2 * np.pi * sigma2)
bound += -0.5 * D * log_det_B
bound += -0.5 * tf.reduce_sum(tf.square(Y_data)) / sigma2
bound += 0.5 * tf.reduce_sum(tf.square(c))
bound += -0.5 * D * (tf.reduce_sum(psi0) / sigma2 - tf.reduce_sum(tf.linalg.diag_part(AAT)))
bound -= KL
return bound
|
def elbo(self) -> tf.Tensor:
"""
Construct a tensorflow function to compute the bound on the marginal
likelihood.
"""
Y_data = self.data
pX = DiagonalGaussian(self.X_data_mean, self.X_data_var)
num_inducing = len(self.inducing_variable)
psi0 = tf.reduce_sum(expectation(pX, self.kernel))
psi1 = expectation(pX, (self.kernel, self.inducing_variable))
psi2 = tf.reduce_sum(
expectation(
pX, (self.kernel, self.inducing_variable), (self.kernel, self.inducing_variable)
),
axis=0,
)
cov_uu = covariances.Kuu(self.inducing_variable, self.kernel, jitter=default_jitter())
L = tf.linalg.cholesky(cov_uu)
sigma2 = self.likelihood.variance
sigma = tf.sqrt(sigma2)
# Compute intermediate matrices
A = tf.linalg.triangular_solve(L, tf.transpose(psi1), lower=True) / sigma
tmp = tf.linalg.triangular_solve(L, psi2, lower=True)
AAT = tf.linalg.triangular_solve(L, tf.transpose(tmp), lower=True) / sigma2
B = AAT + tf.eye(num_inducing, dtype=default_float())
LB = tf.linalg.cholesky(B)
log_det_B = 2.0 * tf.reduce_sum(tf.math.log(tf.linalg.diag_part(LB)))
c = tf.linalg.triangular_solve(LB, tf.linalg.matmul(A, Y_data), lower=True) / sigma
# KL[q(x) || p(x)]
dX_data_var = (
self.X_data_var
if self.X_data_var.shape.ndims == 2
else tf.linalg.diag_part(self.X_data_var)
)
NQ = to_default_float(tf.size(self.X_data_mean))
D = to_default_float(tf.shape(Y_data)[1])
KL = -0.5 * tf.reduce_sum(tf.math.log(dX_data_var))
KL += 0.5 * tf.reduce_sum(tf.math.log(self.X_prior_var))
KL -= 0.5 * NQ
KL += 0.5 * tf.reduce_sum(
(tf.square(self.X_data_mean - self.X_prior_mean) + dX_data_var) / self.X_prior_var
)
# compute log marginal bound
ND = to_default_float(tf.size(Y_data))
bound = -0.5 * ND * tf.math.log(2 * np.pi * sigma2)
bound += -0.5 * D * log_det_B
bound += -0.5 * tf.reduce_sum(tf.square(Y_data)) / sigma2
bound += 0.5 * tf.reduce_sum(tf.square(c))
bound += -0.5 * D * (tf.reduce_sum(psi0) / sigma2 - tf.reduce_sum(tf.linalg.diag_part(AAT)))
bound -= KL
return bound
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def predict_f(
self, Xnew: InputData, full_cov: bool = False, full_output_cov: bool = False
) -> MeanAndVariance:
"""
Compute the mean and variance of the latent function at some new points.
Note that this is very similar to the SGPR prediction, for which
there are notes in the SGPR notebook.
Note: This model does not allow full output covariances.
:param Xnew: points at which to predict
"""
if full_output_cov:
raise NotImplementedError
pX = DiagonalGaussian(self.X_data_mean, self.X_data_var)
Y_data = self.data
num_inducing = self.inducing_variable.num_inducing
psi1 = expectation(pX, (self.kernel, self.inducing_variable))
psi2 = tf.reduce_sum(
expectation(
pX, (self.kernel, self.inducing_variable), (self.kernel, self.inducing_variable)
),
axis=0,
)
jitter = default_jitter()
Kus = covariances.Kuf(self.inducing_variable, self.kernel, Xnew)
sigma2 = self.likelihood.variance
sigma = tf.sqrt(sigma2)
L = tf.linalg.cholesky(covariances.Kuu(self.inducing_variable, self.kernel, jitter=jitter))
A = tf.linalg.triangular_solve(L, tf.transpose(psi1), lower=True) / sigma
tmp = tf.linalg.triangular_solve(L, psi2, lower=True)
AAT = tf.linalg.triangular_solve(L, tf.transpose(tmp), lower=True) / sigma2
B = AAT + tf.eye(num_inducing, dtype=default_float())
LB = tf.linalg.cholesky(B)
c = tf.linalg.triangular_solve(LB, tf.linalg.matmul(A, Y_data), lower=True) / sigma
tmp1 = tf.linalg.triangular_solve(L, Kus, lower=True)
tmp2 = tf.linalg.triangular_solve(LB, tmp1, lower=True)
mean = tf.linalg.matmul(tmp2, c, transpose_a=True)
if full_cov:
var = (
self.kernel(Xnew)
+ tf.linalg.matmul(tmp2, tmp2, transpose_a=True)
- tf.linalg.matmul(tmp1, tmp1, transpose_a=True)
)
shape = tf.stack([1, 1, tf.shape(Y_data)[1]])
var = tf.tile(tf.expand_dims(var, 2), shape)
else:
var = (
self.kernel(Xnew, full_cov=False)
+ tf.reduce_sum(tf.square(tmp2), axis=0)
- tf.reduce_sum(tf.square(tmp1), axis=0)
)
shape = tf.stack([1, tf.shape(Y_data)[1]])
var = tf.tile(tf.expand_dims(var, 1), shape)
return mean + self.mean_function(Xnew), var
|
def predict_f(
self, Xnew: InputData, full_cov: bool = False, full_output_cov: bool = False
) -> MeanAndVariance:
"""
Compute the mean and variance of the latent function at some new points.
Note that this is very similar to the SGPR prediction, for which
there are notes in the SGPR notebook.
Note: This model does not allow full output covariances.
:param Xnew: points at which to predict
"""
if full_output_cov:
raise NotImplementedError
pX = DiagonalGaussian(self.X_data_mean, self.X_data_var)
Y_data = self.data
num_inducing = len(self.inducing_variable)
psi1 = expectation(pX, (self.kernel, self.inducing_variable))
psi2 = tf.reduce_sum(
expectation(
pX, (self.kernel, self.inducing_variable), (self.kernel, self.inducing_variable)
),
axis=0,
)
jitter = default_jitter()
Kus = covariances.Kuf(self.inducing_variable, self.kernel, Xnew)
sigma2 = self.likelihood.variance
sigma = tf.sqrt(sigma2)
L = tf.linalg.cholesky(covariances.Kuu(self.inducing_variable, self.kernel, jitter=jitter))
A = tf.linalg.triangular_solve(L, tf.transpose(psi1), lower=True) / sigma
tmp = tf.linalg.triangular_solve(L, psi2, lower=True)
AAT = tf.linalg.triangular_solve(L, tf.transpose(tmp), lower=True) / sigma2
B = AAT + tf.eye(num_inducing, dtype=default_float())
LB = tf.linalg.cholesky(B)
c = tf.linalg.triangular_solve(LB, tf.linalg.matmul(A, Y_data), lower=True) / sigma
tmp1 = tf.linalg.triangular_solve(L, Kus, lower=True)
tmp2 = tf.linalg.triangular_solve(LB, tmp1, lower=True)
mean = tf.linalg.matmul(tmp2, c, transpose_a=True)
if full_cov:
var = (
self.kernel(Xnew)
+ tf.linalg.matmul(tmp2, tmp2, transpose_a=True)
- tf.linalg.matmul(tmp1, tmp1, transpose_a=True)
)
shape = tf.stack([1, 1, tf.shape(Y_data)[1]])
var = tf.tile(tf.expand_dims(var, 2), shape)
else:
var = (
self.kernel(Xnew, full_cov=False)
+ tf.reduce_sum(tf.square(tmp2), axis=0)
- tf.reduce_sum(tf.square(tmp1), axis=0)
)
shape = tf.stack([1, tf.shape(Y_data)[1]])
var = tf.tile(tf.expand_dims(var, 1), shape)
return mean + self.mean_function(Xnew), var
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
inducing_variable: Optional[InducingPoints] = None,
):
"""
data is a tuple of X, Y with X, a data matrix, size [N, D] and Y, a data matrix, size [N, R]
Z is a data matrix, of inducing inputs, size [M, D]
kernel, likelihood, mean_function are appropriate GPflow objects
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps=num_latent_gps)
self.data = data_input_to_tensor(data)
self.num_data = data[0].shape[0]
self.inducing_variable = inducingpoint_wrapper(inducing_variable)
self.V = Parameter(np.zeros((self.inducing_variable.num_inducing, self.num_latent_gps)))
self.V.prior = tfp.distributions.Normal(
loc=to_default_float(0.0), scale=to_default_float(1.0)
)
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
inducing_variable: Optional[InducingPoints] = None,
):
"""
data is a tuple of X, Y with X, a data matrix, size [N, D] and Y, a data matrix, size [N, R]
Z is a data matrix, of inducing inputs, size [M, D]
kernel, likelihood, mean_function are appropriate GPflow objects
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps=num_latent_gps)
self.data = data_input_to_tensor(data)
self.num_data = data[0].shape[0]
self.inducing_variable = inducingpoint_wrapper(inducing_variable)
self.V = Parameter(np.zeros((len(self.inducing_variable), self.num_latent_gps)))
self.V.prior = tfp.distributions.Normal(
loc=to_default_float(0.0), scale=to_default_float(1.0)
)
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def upper_bound(self) -> tf.Tensor:
"""
Upper bound for the sparse GP regression marginal likelihood. Note that
the same inducing points are used for calculating the upper bound, as are
used for computing the likelihood approximation. This may not lead to the
best upper bound. The upper bound can be tightened by optimising Z, just
like the lower bound. This is especially important in FITC, as FITC is
known to produce poor inducing point locations. An optimisable upper bound
can be found in https://github.com/markvdw/gp_upper.
The key reference is
::
@misc{titsias_2014,
title={Variational Inference for Gaussian and Determinantal Point Processes},
url={http://www2.aueb.gr/users/mtitsias/papers/titsiasNipsVar14.pdf},
publisher={Workshop on Advances in Variational Inference (NIPS 2014)},
author={Titsias, Michalis K.},
year={2014},
month={Dec}
}
The key quantity, the trace term, can be computed via
>>> _, v = conditionals.conditional(X, model.inducing_variable.Z, model.kernel,
... np.zeros((model.inducing_variable.num_inducing, 1)))
which computes each individual element of the trace term.
"""
X_data, Y_data = self.data
num_data = to_default_float(tf.shape(Y_data)[0])
Kdiag = self.kernel(X_data, full_cov=False)
kuu = Kuu(self.inducing_variable, self.kernel, jitter=default_jitter())
kuf = Kuf(self.inducing_variable, self.kernel, X_data)
I = tf.eye(tf.shape(kuu)[0], dtype=default_float())
L = tf.linalg.cholesky(kuu)
A = tf.linalg.triangular_solve(L, kuf, lower=True)
AAT = tf.linalg.matmul(A, A, transpose_b=True)
B = I + AAT / self.likelihood.variance
LB = tf.linalg.cholesky(B)
# Using the Trace bound, from Titsias' presentation
c = tf.reduce_sum(Kdiag) - tf.reduce_sum(tf.square(A))
# Alternative bound on max eigenval:
corrected_noise = self.likelihood.variance + c
const = -0.5 * num_data * tf.math.log(2 * np.pi * self.likelihood.variance)
logdet = -tf.reduce_sum(tf.math.log(tf.linalg.diag_part(LB)))
err = Y_data - self.mean_function(X_data)
LC = tf.linalg.cholesky(I + AAT / corrected_noise)
v = tf.linalg.triangular_solve(LC, tf.linalg.matmul(A, err) / corrected_noise, lower=True)
quad = -0.5 * tf.reduce_sum(tf.square(err)) / corrected_noise + 0.5 * tf.reduce_sum(
tf.square(v)
)
return const + logdet + quad
|
def upper_bound(self) -> tf.Tensor:
"""
Upper bound for the sparse GP regression marginal likelihood. Note that
the same inducing points are used for calculating the upper bound, as are
used for computing the likelihood approximation. This may not lead to the
best upper bound. The upper bound can be tightened by optimising Z, just
like the lower bound. This is especially important in FITC, as FITC is
known to produce poor inducing point locations. An optimisable upper bound
can be found in https://github.com/markvdw/gp_upper.
The key reference is
::
@misc{titsias_2014,
title={Variational Inference for Gaussian and Determinantal Point Processes},
url={http://www2.aueb.gr/users/mtitsias/papers/titsiasNipsVar14.pdf},
publisher={Workshop on Advances in Variational Inference (NIPS 2014)},
author={Titsias, Michalis K.},
year={2014},
month={Dec}
}
The key quantity, the trace term, can be computed via
>>> _, v = conditionals.conditional(X, model.inducing_variable.Z, model.kernel,
... np.zeros((len(model.inducing_variable), 1)))
which computes each individual element of the trace term.
"""
X_data, Y_data = self.data
num_data = to_default_float(tf.shape(Y_data)[0])
Kdiag = self.kernel(X_data, full_cov=False)
kuu = Kuu(self.inducing_variable, self.kernel, jitter=default_jitter())
kuf = Kuf(self.inducing_variable, self.kernel, X_data)
I = tf.eye(tf.shape(kuu)[0], dtype=default_float())
L = tf.linalg.cholesky(kuu)
A = tf.linalg.triangular_solve(L, kuf, lower=True)
AAT = tf.linalg.matmul(A, A, transpose_b=True)
B = I + AAT / self.likelihood.variance
LB = tf.linalg.cholesky(B)
# Using the Trace bound, from Titsias' presentation
c = tf.reduce_sum(Kdiag) - tf.reduce_sum(tf.square(A))
# Alternative bound on max eigenval:
corrected_noise = self.likelihood.variance + c
const = -0.5 * num_data * tf.math.log(2 * np.pi * self.likelihood.variance)
logdet = -tf.reduce_sum(tf.math.log(tf.linalg.diag_part(LB)))
err = Y_data - self.mean_function(X_data)
LC = tf.linalg.cholesky(I + AAT / corrected_noise)
v = tf.linalg.triangular_solve(LC, tf.linalg.matmul(A, err) / corrected_noise, lower=True)
quad = -0.5 * tf.reduce_sum(tf.square(err)) / corrected_noise + 0.5 * tf.reduce_sum(
tf.square(v)
)
return const + logdet + quad
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def elbo(self) -> tf.Tensor:
"""
Construct a tensorflow function to compute the bound on the marginal
likelihood. For a derivation of the terms in here, see the associated
SGPR notebook.
"""
X_data, Y_data = self.data
num_inducing = self.inducing_variable.num_inducing
num_data = to_default_float(tf.shape(Y_data)[0])
output_dim = to_default_float(tf.shape(Y_data)[1])
err = Y_data - self.mean_function(X_data)
Kdiag = self.kernel(X_data, full_cov=False)
kuf = Kuf(self.inducing_variable, self.kernel, X_data)
kuu = Kuu(self.inducing_variable, self.kernel, jitter=default_jitter())
L = tf.linalg.cholesky(kuu)
sigma = tf.sqrt(self.likelihood.variance)
# Compute intermediate matrices
A = tf.linalg.triangular_solve(L, kuf, lower=True) / sigma
AAT = tf.linalg.matmul(A, A, transpose_b=True)
B = AAT + tf.eye(num_inducing, dtype=default_float())
LB = tf.linalg.cholesky(B)
Aerr = tf.linalg.matmul(A, err)
c = tf.linalg.triangular_solve(LB, Aerr, lower=True) / sigma
# compute log marginal bound
bound = -0.5 * num_data * output_dim * np.log(2 * np.pi)
bound += tf.negative(output_dim) * tf.reduce_sum(tf.math.log(tf.linalg.diag_part(LB)))
bound -= 0.5 * num_data * output_dim * tf.math.log(self.likelihood.variance)
bound += -0.5 * tf.reduce_sum(tf.square(err)) / self.likelihood.variance
bound += 0.5 * tf.reduce_sum(tf.square(c))
bound += -0.5 * output_dim * tf.reduce_sum(Kdiag) / self.likelihood.variance
bound += 0.5 * output_dim * tf.reduce_sum(tf.linalg.diag_part(AAT))
return bound
|
def elbo(self) -> tf.Tensor:
"""
Construct a tensorflow function to compute the bound on the marginal
likelihood. For a derivation of the terms in here, see the associated
SGPR notebook.
"""
X_data, Y_data = self.data
num_inducing = len(self.inducing_variable)
num_data = to_default_float(tf.shape(Y_data)[0])
output_dim = to_default_float(tf.shape(Y_data)[1])
err = Y_data - self.mean_function(X_data)
Kdiag = self.kernel(X_data, full_cov=False)
kuf = Kuf(self.inducing_variable, self.kernel, X_data)
kuu = Kuu(self.inducing_variable, self.kernel, jitter=default_jitter())
L = tf.linalg.cholesky(kuu)
sigma = tf.sqrt(self.likelihood.variance)
# Compute intermediate matrices
A = tf.linalg.triangular_solve(L, kuf, lower=True) / sigma
AAT = tf.linalg.matmul(A, A, transpose_b=True)
B = AAT + tf.eye(num_inducing, dtype=default_float())
LB = tf.linalg.cholesky(B)
Aerr = tf.linalg.matmul(A, err)
c = tf.linalg.triangular_solve(LB, Aerr, lower=True) / sigma
# compute log marginal bound
bound = -0.5 * num_data * output_dim * np.log(2 * np.pi)
bound += tf.negative(output_dim) * tf.reduce_sum(tf.math.log(tf.linalg.diag_part(LB)))
bound -= 0.5 * num_data * output_dim * tf.math.log(self.likelihood.variance)
bound += -0.5 * tf.reduce_sum(tf.square(err)) / self.likelihood.variance
bound += 0.5 * tf.reduce_sum(tf.square(c))
bound += -0.5 * output_dim * tf.reduce_sum(Kdiag) / self.likelihood.variance
bound += 0.5 * output_dim * tf.reduce_sum(tf.linalg.diag_part(AAT))
return bound
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def predict_f(self, Xnew: InputData, full_cov=False, full_output_cov=False) -> MeanAndVariance:
"""
Compute the mean and variance of the latent function at some new points
Xnew. For a derivation of the terms in here, see the associated SGPR
notebook.
"""
X_data, Y_data = self.data
num_inducing = self.inducing_variable.num_inducing
err = Y_data - self.mean_function(X_data)
kuf = Kuf(self.inducing_variable, self.kernel, X_data)
kuu = Kuu(self.inducing_variable, self.kernel, jitter=default_jitter())
Kus = Kuf(self.inducing_variable, self.kernel, Xnew)
sigma = tf.sqrt(self.likelihood.variance)
L = tf.linalg.cholesky(kuu)
A = tf.linalg.triangular_solve(L, kuf, lower=True) / sigma
B = tf.linalg.matmul(A, A, transpose_b=True) + tf.eye(num_inducing, dtype=default_float())
LB = tf.linalg.cholesky(B)
Aerr = tf.linalg.matmul(A, err)
c = tf.linalg.triangular_solve(LB, Aerr, lower=True) / sigma
tmp1 = tf.linalg.triangular_solve(L, Kus, lower=True)
tmp2 = tf.linalg.triangular_solve(LB, tmp1, lower=True)
mean = tf.linalg.matmul(tmp2, c, transpose_a=True)
if full_cov:
var = (
self.kernel(Xnew)
+ tf.linalg.matmul(tmp2, tmp2, transpose_a=True)
- tf.linalg.matmul(tmp1, tmp1, transpose_a=True)
)
var = tf.tile(var[None, ...], [self.num_latent_gps, 1, 1]) # [P, N, N]
else:
var = (
self.kernel(Xnew, full_cov=False)
+ tf.reduce_sum(tf.square(tmp2), 0)
- tf.reduce_sum(tf.square(tmp1), 0)
)
var = tf.tile(var[:, None], [1, self.num_latent_gps])
return mean + self.mean_function(Xnew), var
|
def predict_f(self, Xnew: InputData, full_cov=False, full_output_cov=False) -> MeanAndVariance:
"""
Compute the mean and variance of the latent function at some new points
Xnew. For a derivation of the terms in here, see the associated SGPR
notebook.
"""
X_data, Y_data = self.data
num_inducing = len(self.inducing_variable)
err = Y_data - self.mean_function(X_data)
kuf = Kuf(self.inducing_variable, self.kernel, X_data)
kuu = Kuu(self.inducing_variable, self.kernel, jitter=default_jitter())
Kus = Kuf(self.inducing_variable, self.kernel, Xnew)
sigma = tf.sqrt(self.likelihood.variance)
L = tf.linalg.cholesky(kuu)
A = tf.linalg.triangular_solve(L, kuf, lower=True) / sigma
B = tf.linalg.matmul(A, A, transpose_b=True) + tf.eye(num_inducing, dtype=default_float())
LB = tf.linalg.cholesky(B)
Aerr = tf.linalg.matmul(A, err)
c = tf.linalg.triangular_solve(LB, Aerr, lower=True) / sigma
tmp1 = tf.linalg.triangular_solve(L, Kus, lower=True)
tmp2 = tf.linalg.triangular_solve(LB, tmp1, lower=True)
mean = tf.linalg.matmul(tmp2, c, transpose_a=True)
if full_cov:
var = (
self.kernel(Xnew)
+ tf.linalg.matmul(tmp2, tmp2, transpose_a=True)
- tf.linalg.matmul(tmp1, tmp1, transpose_a=True)
)
var = tf.tile(var[None, ...], [self.num_latent_gps, 1, 1]) # [P, N, N]
else:
var = (
self.kernel(Xnew, full_cov=False)
+ tf.reduce_sum(tf.square(tmp2), 0)
- tf.reduce_sum(tf.square(tmp1), 0)
)
var = tf.tile(var[:, None], [1, self.num_latent_gps])
return mean + self.mean_function(Xnew), var
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def common_terms(self):
X_data, Y_data = self.data
num_inducing = self.inducing_variable.num_inducing
err = Y_data - self.mean_function(X_data) # size [N, R]
Kdiag = self.kernel(X_data, full_cov=False)
kuf = Kuf(self.inducing_variable, self.kernel, X_data)
kuu = Kuu(self.inducing_variable, self.kernel, jitter=default_jitter())
Luu = tf.linalg.cholesky(kuu) # => Luu Luu^T = kuu
V = tf.linalg.triangular_solve(Luu, kuf) # => V^T V = Qff = kuf^T kuu^-1 kuf
diagQff = tf.reduce_sum(tf.square(V), 0)
nu = Kdiag - diagQff + self.likelihood.variance
B = tf.eye(num_inducing, dtype=default_float()) + tf.linalg.matmul(
V / nu, V, transpose_b=True
)
L = tf.linalg.cholesky(B)
beta = err / tf.expand_dims(nu, 1) # size [N, R]
alpha = tf.linalg.matmul(V, beta) # size [N, R]
gamma = tf.linalg.triangular_solve(L, alpha, lower=True) # size [N, R]
return err, nu, Luu, L, alpha, beta, gamma
|
def common_terms(self):
X_data, Y_data = self.data
num_inducing = len(self.inducing_variable)
err = Y_data - self.mean_function(X_data) # size [N, R]
Kdiag = self.kernel(X_data, full_cov=False)
kuf = Kuf(self.inducing_variable, self.kernel, X_data)
kuu = Kuu(self.inducing_variable, self.kernel, jitter=default_jitter())
Luu = tf.linalg.cholesky(kuu) # => Luu Luu^T = kuu
V = tf.linalg.triangular_solve(Luu, kuf) # => V^T V = Qff = kuf^T kuu^-1 kuf
diagQff = tf.reduce_sum(tf.square(V), 0)
nu = Kdiag - diagQff + self.likelihood.variance
B = tf.eye(num_inducing, dtype=default_float()) + tf.linalg.matmul(
V / nu, V, transpose_b=True
)
L = tf.linalg.cholesky(B)
beta = err / tf.expand_dims(nu, 1) # size [N, R]
alpha = tf.linalg.matmul(V, beta) # size [N, R]
gamma = tf.linalg.triangular_solve(L, alpha, lower=True) # size [N, R]
return err, nu, Luu, L, alpha, beta, gamma
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def __init__(
self,
kernel,
likelihood,
inducing_variable,
*,
mean_function=None,
num_latent_gps: int = 1,
q_diag: bool = False,
q_mu=None,
q_sqrt=None,
whiten: bool = True,
num_data=None,
):
"""
- kernel, likelihood, inducing_variables, mean_function are appropriate
GPflow objects
- num_latent_gps is the number of latent processes to use, defaults to 1
- q_diag is a boolean. If True, the covariance is approximated by a
diagonal matrix.
- whiten is a boolean. If True, we use the whitened representation of
the inducing points.
- num_data is the total number of observations, defaults to X.shape[0]
(relevant when feeding in external minibatches)
"""
# init the super class, accept args
super().__init__(kernel, likelihood, mean_function, num_latent_gps)
self.num_data = num_data
self.q_diag = q_diag
self.whiten = whiten
self.inducing_variable = inducingpoint_wrapper(inducing_variable)
# init variational parameters
num_inducing = self.inducing_variable.num_inducing
self._init_variational_parameters(num_inducing, q_mu, q_sqrt, q_diag)
|
def __init__(
self,
kernel,
likelihood,
inducing_variable,
*,
mean_function=None,
num_latent_gps: int = 1,
q_diag: bool = False,
q_mu=None,
q_sqrt=None,
whiten: bool = True,
num_data=None,
):
"""
- kernel, likelihood, inducing_variables, mean_function are appropriate
GPflow objects
- num_latent_gps is the number of latent processes to use, defaults to 1
- q_diag is a boolean. If True, the covariance is approximated by a
diagonal matrix.
- whiten is a boolean. If True, we use the whitened representation of
the inducing points.
- num_data is the total number of observations, defaults to X.shape[0]
(relevant when feeding in external minibatches)
"""
# init the super class, accept args
super().__init__(kernel, likelihood, mean_function, num_latent_gps)
self.num_data = num_data
self.q_diag = q_diag
self.whiten = whiten
self.inducing_variable = inducingpoint_wrapper(inducing_variable)
# init variational parameters
num_inducing = len(self.inducing_variable)
self._init_variational_parameters(num_inducing, q_mu, q_sqrt, q_diag)
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nimport tensorflow as tf\\n\\nclass VariableInducingPoints(gpflow.inducing_variables.InducingPoints):\\ndef __init__(self, Z, name=None):\\nsuper().__init__(Z, name=name)\\n# overwrite with Variable with None as first element in shape so\\n# we can assign arrays with arbitrary length along this dimension:\\nself.Z = tf.Variable(Z, trainable=False, dtype=gpflow.default_float(),\\nshape=(None, Z.shape[1])\\n)\\n\\ndef __len__(self):\\nreturn tf.shape(self.Z)[0] # dynamic shape\\n# instead of the static shape returned by the InducingPoints parent class\\n\\nX, Y = np.random.randn(50, 2), np.random.randn(50, 1)\\nZ1 = np.random.randn(13, 2)\\n\\nk = gpflow.kernels.SquaredExponential()\\nm = gpflow.models.SGPR(data=(X, Y), kernel=k, inducing_variable=VariableInducingPoints(Z1))\\n\\nZ2 = np.random.randn(29, 2)\\nm.inducing_variable.Z.assign(Z2)\\n\\nopt = tf.optimizers.Adam()\\n\\n@tf.function\\ndef optimization_step():\\nopt.minimize(m.training_loss, m.trainable_variables)\\n\\nfor _ in range(iter):\\noptimization_step()'}, {'piece_type': 'error message', 'piece_content': 'TypeError Traceback (most recent call last)\\n<ipython-input-24-9a082736eedc> in <module>\\n38\\n39\\n---> 40 optimization_step()\\n41\\n42\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)\\n778 else:\\n779 compiler = "nonXla"\\n--> 780 result = self._call(*args, **kwds)\\n781\\n782 new_tracing_count = self._get_tracing_count()\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)\\n821 # This is the first call of __call__, so we have to initialize.\\n822 initializers = []\\n--> 823 self._initialize(args, kwds, add_initializers_to=initializers)\\n824 finally:\\n825 # At this point we know that the initialization is complete (or less\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)\\n694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)\\n695 self._concrete_stateful_fn = (\\n--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\\n697 *args, **kwds))\\n698\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)\\n2853 args, kwargs = None, None\\n2854 with self._lock:\\n-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)\\n2856 return graph_function\\n2857\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)\\n3211\\n3212 self._function_cache.missed.add(call_context_key)\\n-> 3213 graph_function = self._create_graph_function(args, kwargs)\\n3214 self._function_cache.primary[cache_key] = graph_function\\n3215 return graph_function, args, kwargs\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)\\n3063 arg_names = base_arg_names + missing_arg_names\\n3064 graph_function = ConcreteFunction(\\n-> 3065 func_graph_module.func_graph_from_py_func(\\n3066 self._name,\\n3067 self._python_function,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\\n984 _, original_func = tf_decorator.unwrap(python_func)\\n985\\n--> 986 func_outputs = python_func(*func_args, **func_kwargs)\\n987\\n988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)\\n598 # __wrapped__ allows AutoGraph to swap in a converted function. We give\\n599 # the function a weak reference to itself to avoid a reference cycle.\\n--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)\\n601 weak_wrapped_fn = weakref.ref(wrapped_fn)\\n602\\n\\n~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)\\n971 except Exception as e: # pylint:disable=broad-except\\n972 if hasattr(e, "ag_error_metadata"):\\n--> 973 raise e.ag_error_metadata.to_exception(e)\\n974 else:\\n975 raise\\n\\nTypeError: in user code:\\n\\n<ipython-input-24-9a082736eedc>:32 optimization_step *\\noptimizer.minimize(m.training_loss, m.trainable_variables)\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **\\ngrads_and_vars = self._compute_gradients(\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients\\nloss_value = loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss\\nreturn self._training_loss()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss\\nreturn -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective\\nreturn self.elbo()\\n/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo\\nnum_inducing = len(self.inducing_variable)\\n\\nTypeError: \\'Tensor\\' object cannot be interpreted as an integer'}]
|
TypeError Traceback (most recent call last)
<ipython-input-24-9a082736eedc> in <module>
38
39
---> 40 optimization_step()
41
42
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
TypeError: in user code:
<ipython-input-24-9a082736eedc>:32 optimization_step *
optimizer.minimize(m.training_loss, m.trainable_variables)
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:374 minimize **
grads_and_vars = self._compute_gradients(
/home/maltamirano/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:429 _compute_gradients
loss_value = loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/training_mixins.py:64 training_loss
return self._training_loss()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/model.py:57 _training_loss
return -(self.maximum_log_likelihood_objective(*args, **kwargs) + self.log_prior_density())
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:154 maximum_log_likelihood_objective
return self.elbo()
/home/maltamirano/anaconda3/lib/python3.8/site-packages/gpflow/models/sgpr.py:164 elbo
num_inducing = len(self.inducing_variable)
TypeError: 'Tensor' object cannot be interpreted as an integer
|
TypeError
|
def ndiagquad(funcs, H: int, Fmu, Fvar, logspace: bool = False, **Ys):
"""
Computes N Gaussian expectation integrals of one or more functions
using Gauss-Hermite quadrature. The Gaussians must be independent.
The means and variances of the Gaussians are specified by Fmu and Fvar.
The N-integrals are assumed to be taken wrt the last dimensions of Fmu, Fvar.
:param funcs: the integrand(s):
Callable or Iterable of Callables that operates elementwise
:param H: number of Gauss-Hermite quadrature points
:param Fmu: array/tensor or `Din`-tuple/list thereof
:param Fvar: array/tensor or `Din`-tuple/list thereof
:param logspace: if True, funcs are the log-integrands and this calculates
the log-expectation of exp(funcs)
:param **Ys: arrays/tensors; deterministic arguments to be passed by name
Fmu, Fvar, Ys should all have same shape, with overall size `N`
:return: shape is the same as that of the first Fmu
"""
n_gh = H
if isinstance(Fmu, (tuple, list)):
dim = len(Fmu)
shape = tf.shape(Fmu[0])
Fmu = tf.stack(Fmu, axis=-1)
Fvar = tf.stack(Fvar, axis=-1)
else:
dim = 1
shape = tf.shape(Fmu)
Fmu = tf.reshape(Fmu, (-1, dim))
Fvar = tf.reshape(Fvar, (-1, dim))
Ys = {Yname: tf.reshape(Y, (-1, 1)) for Yname, Y in Ys.items()}
def wrapper(old_fun):
def new_fun(X, **Ys):
Xs = tf.unstack(X, axis=-1)
fun_eval = old_fun(*Xs, **Ys)
return tf.cond(
pred=tf.less(tf.rank(fun_eval), tf.rank(X)),
true_fn=lambda: fun_eval[..., tf.newaxis],
false_fn=lambda: fun_eval,
)
return new_fun
if isinstance(funcs, Iterable):
funcs = [wrapper(f) for f in funcs]
else:
funcs = wrapper(funcs)
quadrature = NDiagGHQuadrature(dim, n_gh)
if logspace:
result = quadrature.logspace(funcs, Fmu, Fvar, **Ys)
else:
result = quadrature(funcs, Fmu, Fvar, **Ys)
if isinstance(result, list):
result = [tf.reshape(r, shape) for r in result]
else:
result = tf.reshape(result, shape)
return result
|
def ndiagquad(funcs, H: int, Fmu, Fvar, logspace: bool = False, **Ys):
"""
Computes N Gaussian expectation integrals of one or more functions
using Gauss-Hermite quadrature. The Gaussians must be independent.
The means and variances of the Gaussians are specified by Fmu and Fvar.
The N-integrals are assumed to be taken wrt the last dimensions of Fmu, Fvar.
:param funcs: the integrand(s):
Callable or Iterable of Callables that operates elementwise
:param H: number of Gauss-Hermite quadrature points
:param Fmu: array/tensor or `Din`-tuple/list thereof
:param Fvar: array/tensor or `Din`-tuple/list thereof
:param logspace: if True, funcs are the log-integrands and this calculates
the log-expectation of exp(funcs)
:param **Ys: arrays/tensors; deterministic arguments to be passed by name
Fmu, Fvar, Ys should all have same shape, with overall size `N`
:return: shape is the same as that of the first Fmu
"""
n_gh = H
if isinstance(Fmu, (tuple, list)):
dim = len(Fmu)
shape = tf.shape(Fmu[0])
Fmu = tf.stack(Fmu, axis=-1)
Fvar = tf.stack(Fvar, axis=-1)
else:
dim = 1
shape = tf.shape(Fmu)
Fmu = tf.reshape(Fmu, (-1, dim))
Fvar = tf.reshape(Fvar, (-1, dim))
Ys = {Yname: tf.reshape(Y, (-1, 1)) for Yname, Y in Ys.items()}
def wrapper(old_fun):
def new_fun(X, **Ys):
Xs = tf.unstack(X, axis=-1)
fun_eval = old_fun(*Xs, **Ys)
if tf.rank(fun_eval) < tf.rank(X):
fun_eval = tf.expand_dims(fun_eval, axis=-1)
return fun_eval
return new_fun
if isinstance(funcs, Iterable):
funcs = [wrapper(f) for f in funcs]
else:
funcs = wrapper(funcs)
quadrature = NDiagGHQuadrature(dim, n_gh)
if logspace:
result = quadrature.logspace(funcs, Fmu, Fvar, **Ys)
else:
result = quadrature(funcs, Fmu, Fvar, **Ys)
if isinstance(result, list):
result = [tf.reshape(r, shape) for r in result]
else:
result = tf.reshape(result, shape)
return result
|
[{'piece_type': 'other', 'piece_content': 'tensorflow.python.framework.errors_impl.OperatorNotAllowedInGraphError: using a `tf.Tensor` as a Python `bool` is not allowed: AutoGraph is disabled in this function. Try decorating it directly with @tf.function.'}, {'piece_type': 'reproducing source code', 'piece_content': "import tensorflow as tf\\nimport numpy as np\\nfrom gpflow import quadrature\\n\\n\\n@tf.function(autograph=False)\\ndef compute():\\nmu = np.array([1.0, 1.3])\\nvar = np.array([3.0, 3.5])\\nnum_gauss_hermite_points = 25\\nquad = quadrature.ndiagquad([lambda *X: tf.exp(X[0])], num_gauss_hermite_points, [mu], [var])\\nreturn quad\\n\\n\\ndef go():\\nquad = compute()\\nprint(f'Result: {quad}')\\n\\n\\ngo()"}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "gpflow_error.py", line 20, in <module>\\ngo()\\nFile "gpflow_error.py", line 16, in go\\nquad = compute()\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 580, in __call__\\nresult = self._call(*args, **kwds)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 627, in _call\\nself._initialize(args, kwds, add_initializers_to=initializers)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 506, in _initialize\\n*args, **kwds))\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2446, in _get_concrete_function_internal_garbage_collected\\ngraph_function, _, _ = self._maybe_define_function(args, kwargs)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2777, in _maybe_define_function\\ngraph_function = self._create_graph_function(args, kwargs)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2667, in _create_graph_function\\ncapture_by_value=self._capture_by_value),\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py", line 981, in func_graph_from_py_func\\nfunc_outputs = python_func(*func_args, **func_kwargs)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 441, in wrapped_fn\\nreturn weak_wrapped_fn().__wrapped__(*args, **kwds)\\nFile "gpflow_error.py", line 11, in compute\\nquad = quadrature.ndiagquad([lambda *X: tf.exp(X[0])], num_gauss_hermite_points, [mu], [var])\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/deprecated.py", line 156, in ndiagquad\\nresult = quadrature(funcs, Fmu, Fvar, **Ys)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/base.py", line 52, in __call__\\nreturn [tf.reduce_sum(f(X, *args, **kwargs) * W, axis=-2) for f in fun]\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/base.py", line 52, in <listcomp>\\nreturn [tf.reduce_sum(f(X, *args, **kwargs) * W, axis=-2) for f in fun]\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/deprecated.py", line 141, in new_fun\\nif tf.rank(fun_eval) < tf.rank(X):\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 778, in __bool__\\nself._disallow_bool_casting()\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 542, in _disallow_bool_casting\\n"using a `tf.Tensor` as a Python `bool`")\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 527, in _disallow_when_autograph_disabled\\n" Try decorating it directly with @tf.function.".format(task))\\ntensorflow.python.framework.errors_impl.OperatorNotAllowedInGraphError: using a `tf.Tensor` as a Python `bool` is not allowed: AutoGraph is disabled in this function. Try decorating it directly with @tf.function.'}]
|
Traceback (most recent call last):
File "gpflow_error.py", line 20, in <module>
go()
File "gpflow_error.py", line 16, in go
quad = compute()
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 580, in __call__
result = self._call(*args, **kwds)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 627, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 506, in _initialize
*args, **kwds))
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2446, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2777, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2667, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py", line 981, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 441, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "gpflow_error.py", line 11, in compute
quad = quadrature.ndiagquad([lambda *X: tf.exp(X[0])], num_gauss_hermite_points, [mu], [var])
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/deprecated.py", line 156, in ndiagquad
result = quadrature(funcs, Fmu, Fvar, **Ys)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/base.py", line 52, in __call__
return [tf.reduce_sum(f(X, *args, **kwargs) * W, axis=-2) for f in fun]
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/base.py", line 52, in <listcomp>
return [tf.reduce_sum(f(X, *args, **kwargs) * W, axis=-2) for f in fun]
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/deprecated.py", line 141, in new_fun
if tf.rank(fun_eval) < tf.rank(X):
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 778, in __bool__
self._disallow_bool_casting()
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 542, in _disallow_bool_casting
"using a `tf.Tensor` as a Python `bool`")
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 527, in _disallow_when_autograph_disabled
" Try decorating it directly with @tf.function.".format(task))
tensorflow.python.framework.errors_impl.OperatorNotAllowedInGraphError: using a `tf.Tensor` as a Python `bool` is not allowed: AutoGraph is disabled in this function. Try decorating it directly with @tf.function.
|
tensorflow.python.framework.errors_impl.OperatorNotAllowedInGraphError
|
def wrapper(old_fun):
def new_fun(X, **Ys):
Xs = tf.unstack(X, axis=-1)
fun_eval = old_fun(*Xs, **Ys)
return tf.cond(
pred=tf.less(tf.rank(fun_eval), tf.rank(X)),
true_fn=lambda: fun_eval[..., tf.newaxis],
false_fn=lambda: fun_eval,
)
return new_fun
|
def wrapper(old_fun):
def new_fun(X, **Ys):
Xs = tf.unstack(X, axis=-1)
fun_eval = old_fun(*Xs, **Ys)
if tf.rank(fun_eval) < tf.rank(X):
fun_eval = tf.expand_dims(fun_eval, axis=-1)
return fun_eval
return new_fun
|
[{'piece_type': 'other', 'piece_content': 'tensorflow.python.framework.errors_impl.OperatorNotAllowedInGraphError: using a `tf.Tensor` as a Python `bool` is not allowed: AutoGraph is disabled in this function. Try decorating it directly with @tf.function.'}, {'piece_type': 'reproducing source code', 'piece_content': "import tensorflow as tf\\nimport numpy as np\\nfrom gpflow import quadrature\\n\\n\\n@tf.function(autograph=False)\\ndef compute():\\nmu = np.array([1.0, 1.3])\\nvar = np.array([3.0, 3.5])\\nnum_gauss_hermite_points = 25\\nquad = quadrature.ndiagquad([lambda *X: tf.exp(X[0])], num_gauss_hermite_points, [mu], [var])\\nreturn quad\\n\\n\\ndef go():\\nquad = compute()\\nprint(f'Result: {quad}')\\n\\n\\ngo()"}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "gpflow_error.py", line 20, in <module>\\ngo()\\nFile "gpflow_error.py", line 16, in go\\nquad = compute()\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 580, in __call__\\nresult = self._call(*args, **kwds)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 627, in _call\\nself._initialize(args, kwds, add_initializers_to=initializers)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 506, in _initialize\\n*args, **kwds))\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2446, in _get_concrete_function_internal_garbage_collected\\ngraph_function, _, _ = self._maybe_define_function(args, kwargs)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2777, in _maybe_define_function\\ngraph_function = self._create_graph_function(args, kwargs)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2667, in _create_graph_function\\ncapture_by_value=self._capture_by_value),\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py", line 981, in func_graph_from_py_func\\nfunc_outputs = python_func(*func_args, **func_kwargs)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 441, in wrapped_fn\\nreturn weak_wrapped_fn().__wrapped__(*args, **kwds)\\nFile "gpflow_error.py", line 11, in compute\\nquad = quadrature.ndiagquad([lambda *X: tf.exp(X[0])], num_gauss_hermite_points, [mu], [var])\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/deprecated.py", line 156, in ndiagquad\\nresult = quadrature(funcs, Fmu, Fvar, **Ys)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/base.py", line 52, in __call__\\nreturn [tf.reduce_sum(f(X, *args, **kwargs) * W, axis=-2) for f in fun]\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/base.py", line 52, in <listcomp>\\nreturn [tf.reduce_sum(f(X, *args, **kwargs) * W, axis=-2) for f in fun]\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/deprecated.py", line 141, in new_fun\\nif tf.rank(fun_eval) < tf.rank(X):\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 778, in __bool__\\nself._disallow_bool_casting()\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 542, in _disallow_bool_casting\\n"using a `tf.Tensor` as a Python `bool`")\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 527, in _disallow_when_autograph_disabled\\n" Try decorating it directly with @tf.function.".format(task))\\ntensorflow.python.framework.errors_impl.OperatorNotAllowedInGraphError: using a `tf.Tensor` as a Python `bool` is not allowed: AutoGraph is disabled in this function. Try decorating it directly with @tf.function.'}]
|
Traceback (most recent call last):
File "gpflow_error.py", line 20, in <module>
go()
File "gpflow_error.py", line 16, in go
quad = compute()
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 580, in __call__
result = self._call(*args, **kwds)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 627, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 506, in _initialize
*args, **kwds))
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2446, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2777, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2667, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py", line 981, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 441, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "gpflow_error.py", line 11, in compute
quad = quadrature.ndiagquad([lambda *X: tf.exp(X[0])], num_gauss_hermite_points, [mu], [var])
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/deprecated.py", line 156, in ndiagquad
result = quadrature(funcs, Fmu, Fvar, **Ys)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/base.py", line 52, in __call__
return [tf.reduce_sum(f(X, *args, **kwargs) * W, axis=-2) for f in fun]
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/base.py", line 52, in <listcomp>
return [tf.reduce_sum(f(X, *args, **kwargs) * W, axis=-2) for f in fun]
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/deprecated.py", line 141, in new_fun
if tf.rank(fun_eval) < tf.rank(X):
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 778, in __bool__
self._disallow_bool_casting()
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 542, in _disallow_bool_casting
"using a `tf.Tensor` as a Python `bool`")
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 527, in _disallow_when_autograph_disabled
" Try decorating it directly with @tf.function.".format(task))
tensorflow.python.framework.errors_impl.OperatorNotAllowedInGraphError: using a `tf.Tensor` as a Python `bool` is not allowed: AutoGraph is disabled in this function. Try decorating it directly with @tf.function.
|
tensorflow.python.framework.errors_impl.OperatorNotAllowedInGraphError
|
def new_fun(X, **Ys):
Xs = tf.unstack(X, axis=-1)
fun_eval = old_fun(*Xs, **Ys)
return tf.cond(
pred=tf.less(tf.rank(fun_eval), tf.rank(X)),
true_fn=lambda: fun_eval[..., tf.newaxis],
false_fn=lambda: fun_eval,
)
|
def new_fun(X, **Ys):
Xs = tf.unstack(X, axis=-1)
fun_eval = old_fun(*Xs, **Ys)
if tf.rank(fun_eval) < tf.rank(X):
fun_eval = tf.expand_dims(fun_eval, axis=-1)
return fun_eval
|
[{'piece_type': 'other', 'piece_content': 'tensorflow.python.framework.errors_impl.OperatorNotAllowedInGraphError: using a `tf.Tensor` as a Python `bool` is not allowed: AutoGraph is disabled in this function. Try decorating it directly with @tf.function.'}, {'piece_type': 'reproducing source code', 'piece_content': "import tensorflow as tf\\nimport numpy as np\\nfrom gpflow import quadrature\\n\\n\\n@tf.function(autograph=False)\\ndef compute():\\nmu = np.array([1.0, 1.3])\\nvar = np.array([3.0, 3.5])\\nnum_gauss_hermite_points = 25\\nquad = quadrature.ndiagquad([lambda *X: tf.exp(X[0])], num_gauss_hermite_points, [mu], [var])\\nreturn quad\\n\\n\\ndef go():\\nquad = compute()\\nprint(f'Result: {quad}')\\n\\n\\ngo()"}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "gpflow_error.py", line 20, in <module>\\ngo()\\nFile "gpflow_error.py", line 16, in go\\nquad = compute()\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 580, in __call__\\nresult = self._call(*args, **kwds)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 627, in _call\\nself._initialize(args, kwds, add_initializers_to=initializers)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 506, in _initialize\\n*args, **kwds))\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2446, in _get_concrete_function_internal_garbage_collected\\ngraph_function, _, _ = self._maybe_define_function(args, kwargs)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2777, in _maybe_define_function\\ngraph_function = self._create_graph_function(args, kwargs)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2667, in _create_graph_function\\ncapture_by_value=self._capture_by_value),\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py", line 981, in func_graph_from_py_func\\nfunc_outputs = python_func(*func_args, **func_kwargs)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 441, in wrapped_fn\\nreturn weak_wrapped_fn().__wrapped__(*args, **kwds)\\nFile "gpflow_error.py", line 11, in compute\\nquad = quadrature.ndiagquad([lambda *X: tf.exp(X[0])], num_gauss_hermite_points, [mu], [var])\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/deprecated.py", line 156, in ndiagquad\\nresult = quadrature(funcs, Fmu, Fvar, **Ys)\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/base.py", line 52, in __call__\\nreturn [tf.reduce_sum(f(X, *args, **kwargs) * W, axis=-2) for f in fun]\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/base.py", line 52, in <listcomp>\\nreturn [tf.reduce_sum(f(X, *args, **kwargs) * W, axis=-2) for f in fun]\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/deprecated.py", line 141, in new_fun\\nif tf.rank(fun_eval) < tf.rank(X):\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 778, in __bool__\\nself._disallow_bool_casting()\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 542, in _disallow_bool_casting\\n"using a `tf.Tensor` as a Python `bool`")\\nFile "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 527, in _disallow_when_autograph_disabled\\n" Try decorating it directly with @tf.function.".format(task))\\ntensorflow.python.framework.errors_impl.OperatorNotAllowedInGraphError: using a `tf.Tensor` as a Python `bool` is not allowed: AutoGraph is disabled in this function. Try decorating it directly with @tf.function.'}]
|
Traceback (most recent call last):
File "gpflow_error.py", line 20, in <module>
go()
File "gpflow_error.py", line 16, in go
quad = compute()
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 580, in __call__
result = self._call(*args, **kwds)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 627, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 506, in _initialize
*args, **kwds))
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2446, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2777, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/function.py", line 2667, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py", line 981, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py", line 441, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "gpflow_error.py", line 11, in compute
quad = quadrature.ndiagquad([lambda *X: tf.exp(X[0])], num_gauss_hermite_points, [mu], [var])
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/deprecated.py", line 156, in ndiagquad
result = quadrature(funcs, Fmu, Fvar, **Ys)
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/base.py", line 52, in __call__
return [tf.reduce_sum(f(X, *args, **kwargs) * W, axis=-2) for f in fun]
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/base.py", line 52, in <listcomp>
return [tf.reduce_sum(f(X, *args, **kwargs) * W, axis=-2) for f in fun]
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/gpflow/quadrature/deprecated.py", line 141, in new_fun
if tf.rank(fun_eval) < tf.rank(X):
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 778, in __bool__
self._disallow_bool_casting()
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 542, in _disallow_bool_casting
"using a `tf.Tensor` as a Python `bool`")
File "/Users/nferguson/gpflow_error/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 527, in _disallow_when_autograph_disabled
" Try decorating it directly with @tf.function.".format(task))
tensorflow.python.framework.errors_impl.OperatorNotAllowedInGraphError: using a `tf.Tensor` as a Python `bool` is not allowed: AutoGraph is disabled in this function. Try decorating it directly with @tf.function.
|
tensorflow.python.framework.errors_impl.OperatorNotAllowedInGraphError
|
def __init__(
self,
data: OutputData,
latent_dim: int,
X_data_mean: Optional[tf.Tensor] = None,
kernel: Optional[Kernel] = None,
mean_function: Optional[MeanFunction] = None,
):
"""
Initialise GPLVM object. This method only works with a Gaussian likelihood.
:param data: y data matrix, size N (number of points) x D (dimensions)
:param latent_dim: the number of latent dimensions (Q)
:param X_data_mean: latent positions ([N, Q]), for the initialisation of the latent space.
:param kernel: kernel specification, by default Squared Exponential
:param mean_function: mean function, by default None.
"""
if X_data_mean is None:
X_data_mean = pca_reduce(data, latent_dim)
num_latent_gps = X_data_mean.shape[1]
if num_latent_gps != latent_dim:
msg = "Passed in number of latent {0} does not match initial X {1}."
raise ValueError(msg.format(latent_dim, num_latent_gps))
if mean_function is None:
mean_function = Zero()
if kernel is None:
kernel = kernels.SquaredExponential(lengthscales=tf.ones((latent_dim,)))
if data.shape[1] < num_latent_gps:
raise ValueError("More latent dimensions than observed.")
gpr_data = (Parameter(X_data_mean), data_input_to_tensor(data))
super().__init__(gpr_data, kernel, mean_function=mean_function)
|
def __init__(
self,
data: OutputData,
latent_dim: int,
X_data_mean: Optional[tf.Tensor] = None,
kernel: Optional[Kernel] = None,
mean_function: Optional[MeanFunction] = None,
):
"""
Initialise GPLVM object. This method only works with a Gaussian likelihood.
:param data: y data matrix, size N (number of points) x D (dimensions)
:param latent_dim: the number of latent dimensions (Q)
:param X_data_mean: latent positions ([N, Q]), for the initialisation of the latent space.
:param kernel: kernel specification, by default Squared Exponential
:param mean_function: mean function, by default None.
"""
if X_data_mean is None:
X_data_mean = pca_reduce(data, latent_dim)
num_latent_gps = X_data_mean.shape[1]
if num_latent_gps != latent_dim:
msg = "Passed in number of latent {0} does not match initial X {1}."
raise ValueError(msg.format(latent_dim, num_latent_gps))
if mean_function is None:
mean_function = Zero()
if kernel is None:
kernel = kernels.SquaredExponential(lengthscales=tf.ones((latent_dim,)))
if data.shape[1] < num_latent_gps:
raise ValueError("More latent dimensions than observed.")
gpr_data = (Parameter(X_data_mean), data)
super().__init__(gpr_data, kernel, mean_function=mean_function)
|
[{'piece_type': 'other', 'piece_content': 'ValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "main.py", line 177, in <module>\\nmain(args)\\nFile "main.py", line 64, in main\\nbuild_allele(args)\\nFile "/path/to/1_model_sim/drivers.py", line 226, in build_allele\\nopt_model_list(m)\\nFile "/path/to/1_model_sim/model.py", line 355, in opt_model_list\\nm.trainable_variables)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize\\nfunc, initial_params, jac=True, method=method, **scipy_kwargs\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize\\ncallback=callback, **options)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb\\nf, g = func_and_grad(x)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad\\nf = fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper\\nreturn function(*(wrapper_args + args))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__\\nfg = self.fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval\\nloss, grad = _tf_eval(tf.convert_to_tensor(x))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__\\nresult = self._call(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call\\nself._initialize(args, kwds, add_initializers_to=initializers)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize\\n*args, **kwds))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected\\ngraph_function, _, _ = self._maybe_define_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function\\ngraph_function = self._create_graph_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function\\ncapture_by_value=self._capture_by_value),\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func\\nfunc_outputs = python_func(*func_args, **func_kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn\\nreturn weak_wrapped_fn().__wrapped__(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper\\nraise e.ag_error_metadata.to_exception(e)\\nValueError: in converted code:\\n\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *\\nloss, grads = _compute_loss_and_gradients(closure, variables)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *\\nloss = loss_closure()\\n/path/to/1_model_sim/model.py:354 None *\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *\\nlog_prob = multivariate_normal(Y, m, L)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *\\nd = x - mu\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper\\nx = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor\\nret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function\\nreturn constant_op.constant(value, dtype, name=name)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant\\nallow_broadcast=True)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl\\nallow_broadcast=allow_broadcast))\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto\\n"Cannot create a tensor proto whose content is larger than 2GB.")\\n\\nValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'other', 'piece_content': 'm = gpf.models.GPR(data=(X, Y),\\nkernel=gpf.kernels.Exponential(active_dims = [0,1]),\\nmean_function=None)\\n\\nopt = gpf.optimizers.Scipy()\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\nm.trainable_variables)'}]
|
Traceback (most recent call last):
File "main.py", line 177, in <module>
main(args)
File "main.py", line 64, in main
build_allele(args)
File "/path/to/1_model_sim/drivers.py", line 226, in build_allele
opt_model_list(m)
File "/path/to/1_model_sim/model.py", line 355, in opt_model_list
m.trainable_variables)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize
func, initial_params, jac=True, method=method, **scipy_kwargs
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize
callback=callback, **options)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb
f, g = func_and_grad(x)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad
f = fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper
return function(*(wrapper_args + args))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__
fg = self.fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval
loss, grad = _tf_eval(tf.convert_to_tensor(x))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__
result = self._call(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize
*args, **kwds))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in converted code:
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *
loss, grads = _compute_loss_and_gradients(closure, variables)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *
loss = loss_closure()
/path/to/1_model_sim/model.py:354 None *
opt.minimize(lambda: - m.log_marginal_likelihood(),
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *
log_prob = multivariate_normal(Y, m, L)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *
d = x - mu
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function
return constant_op.constant(value, dtype, name=name)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant
allow_broadcast=True)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl
allow_broadcast=allow_broadcast))
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto
"Cannot create a tensor proto whose content is larger than 2GB.")
ValueError: Cannot create a tensor proto whose content is larger than 2GB.
|
ValueError
|
def __init__(
self,
data: OutputData,
X_data_mean: tf.Tensor,
X_data_var: tf.Tensor,
kernel: Kernel,
num_inducing_variables: Optional[int] = None,
inducing_variable=None,
X_prior_mean=None,
X_prior_var=None,
):
"""
Initialise Bayesian GPLVM object. This method only works with a Gaussian likelihood.
:param data: data matrix, size N (number of points) x D (dimensions)
:param X_data_mean: initial latent positions, size N (number of points) x Q (latent dimensions).
:param X_data_var: variance of latent positions ([N, Q]), for the initialisation of the latent space.
:param kernel: kernel specification, by default Squared Exponential
:param num_inducing_variables: number of inducing points, M
:param inducing_variable: matrix of inducing points, size M (inducing points) x Q (latent dimensions). By default
random permutation of X_data_mean.
:param X_prior_mean: prior mean used in KL term of bound. By default 0. Same size as X_data_mean.
:param X_prior_var: prior variance used in KL term of bound. By default 1.
"""
num_data, num_latent_gps = X_data_mean.shape
super().__init__(kernel, likelihoods.Gaussian(), num_latent_gps=num_latent_gps)
self.data = data_input_to_tensor(data)
assert X_data_var.ndim == 2
self.X_data_mean = Parameter(X_data_mean)
self.X_data_var = Parameter(X_data_var, transform=positive())
self.num_data = num_data
self.output_dim = self.data.shape[-1]
assert np.all(X_data_mean.shape == X_data_var.shape)
assert X_data_mean.shape[0] == self.data.shape[0], "X mean and Y must be same size."
assert X_data_var.shape[0] == self.data.shape[0], "X var and Y must be same size."
if (inducing_variable is None) == (num_inducing_variables is None):
raise ValueError(
"BayesianGPLVM needs exactly one of `inducing_variable` and `num_inducing_variables`"
)
if inducing_variable is None:
# By default we initialize by subset of initial latent points
# Note that tf.random.shuffle returns a copy, it does not shuffle in-place
Z = tf.random.shuffle(X_data_mean)[:num_inducing_variables]
inducing_variable = InducingPoints(Z)
self.inducing_variable = inducingpoint_wrapper(inducing_variable)
assert X_data_mean.shape[1] == self.num_latent_gps
# deal with parameters for the prior mean variance of X
if X_prior_mean is None:
X_prior_mean = tf.zeros((self.num_data, self.num_latent_gps), dtype=default_float())
if X_prior_var is None:
X_prior_var = tf.ones((self.num_data, self.num_latent_gps))
self.X_prior_mean = tf.convert_to_tensor(np.atleast_1d(X_prior_mean), dtype=default_float())
self.X_prior_var = tf.convert_to_tensor(np.atleast_1d(X_prior_var), dtype=default_float())
assert self.X_prior_mean.shape[0] == self.num_data
assert self.X_prior_mean.shape[1] == self.num_latent_gps
assert self.X_prior_var.shape[0] == self.num_data
assert self.X_prior_var.shape[1] == self.num_latent_gps
|
def __init__(
self,
data: OutputData,
X_data_mean: tf.Tensor,
X_data_var: tf.Tensor,
kernel: Kernel,
num_inducing_variables: Optional[int] = None,
inducing_variable=None,
X_prior_mean=None,
X_prior_var=None,
):
"""
Initialise Bayesian GPLVM object. This method only works with a Gaussian likelihood.
:param data: data matrix, size N (number of points) x D (dimensions)
:param X_data_mean: initial latent positions, size N (number of points) x Q (latent dimensions).
:param X_data_var: variance of latent positions ([N, Q]), for the initialisation of the latent space.
:param kernel: kernel specification, by default Squared Exponential
:param num_inducing_variables: number of inducing points, M
:param inducing_variable: matrix of inducing points, size M (inducing points) x Q (latent dimensions). By default
random permutation of X_data_mean.
:param X_prior_mean: prior mean used in KL term of bound. By default 0. Same size as X_data_mean.
:param X_prior_var: prior variance used in KL term of bound. By default 1.
"""
num_data, num_latent_gps = X_data_mean.shape
super().__init__(kernel, likelihoods.Gaussian(), num_latent_gps=num_latent_gps)
self.data = data
assert X_data_var.ndim == 2
self.X_data_mean = Parameter(X_data_mean)
self.X_data_var = Parameter(X_data_var, transform=positive())
self.num_data = num_data
self.output_dim = data.shape[-1]
assert np.all(X_data_mean.shape == X_data_var.shape)
assert X_data_mean.shape[0] == data.shape[0], "X mean and Y must be same size."
assert X_data_var.shape[0] == data.shape[0], "X var and Y must be same size."
if (inducing_variable is None) == (num_inducing_variables is None):
raise ValueError(
"BayesianGPLVM needs exactly one of `inducing_variable` and `num_inducing_variables`"
)
if inducing_variable is None:
# By default we initialize by subset of initial latent points
# Note that tf.random.shuffle returns a copy, it does not shuffle in-place
Z = tf.random.shuffle(X_data_mean)[:num_inducing_variables]
inducing_variable = InducingPoints(Z)
self.inducing_variable = inducingpoint_wrapper(inducing_variable)
assert X_data_mean.shape[1] == self.num_latent_gps
# deal with parameters for the prior mean variance of X
if X_prior_mean is None:
X_prior_mean = tf.zeros((self.num_data, self.num_latent_gps), dtype=default_float())
if X_prior_var is None:
X_prior_var = tf.ones((self.num_data, self.num_latent_gps))
self.X_prior_mean = tf.convert_to_tensor(np.atleast_1d(X_prior_mean), dtype=default_float())
self.X_prior_var = tf.convert_to_tensor(np.atleast_1d(X_prior_var), dtype=default_float())
assert self.X_prior_mean.shape[0] == self.num_data
assert self.X_prior_mean.shape[1] == self.num_latent_gps
assert self.X_prior_var.shape[0] == self.num_data
assert self.X_prior_var.shape[1] == self.num_latent_gps
|
[{'piece_type': 'other', 'piece_content': 'ValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "main.py", line 177, in <module>\\nmain(args)\\nFile "main.py", line 64, in main\\nbuild_allele(args)\\nFile "/path/to/1_model_sim/drivers.py", line 226, in build_allele\\nopt_model_list(m)\\nFile "/path/to/1_model_sim/model.py", line 355, in opt_model_list\\nm.trainable_variables)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize\\nfunc, initial_params, jac=True, method=method, **scipy_kwargs\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize\\ncallback=callback, **options)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb\\nf, g = func_and_grad(x)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad\\nf = fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper\\nreturn function(*(wrapper_args + args))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__\\nfg = self.fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval\\nloss, grad = _tf_eval(tf.convert_to_tensor(x))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__\\nresult = self._call(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call\\nself._initialize(args, kwds, add_initializers_to=initializers)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize\\n*args, **kwds))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected\\ngraph_function, _, _ = self._maybe_define_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function\\ngraph_function = self._create_graph_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function\\ncapture_by_value=self._capture_by_value),\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func\\nfunc_outputs = python_func(*func_args, **func_kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn\\nreturn weak_wrapped_fn().__wrapped__(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper\\nraise e.ag_error_metadata.to_exception(e)\\nValueError: in converted code:\\n\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *\\nloss, grads = _compute_loss_and_gradients(closure, variables)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *\\nloss = loss_closure()\\n/path/to/1_model_sim/model.py:354 None *\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *\\nlog_prob = multivariate_normal(Y, m, L)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *\\nd = x - mu\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper\\nx = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor\\nret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function\\nreturn constant_op.constant(value, dtype, name=name)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant\\nallow_broadcast=True)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl\\nallow_broadcast=allow_broadcast))\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto\\n"Cannot create a tensor proto whose content is larger than 2GB.")\\n\\nValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'other', 'piece_content': 'm = gpf.models.GPR(data=(X, Y),\\nkernel=gpf.kernels.Exponential(active_dims = [0,1]),\\nmean_function=None)\\n\\nopt = gpf.optimizers.Scipy()\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\nm.trainable_variables)'}]
|
Traceback (most recent call last):
File "main.py", line 177, in <module>
main(args)
File "main.py", line 64, in main
build_allele(args)
File "/path/to/1_model_sim/drivers.py", line 226, in build_allele
opt_model_list(m)
File "/path/to/1_model_sim/model.py", line 355, in opt_model_list
m.trainable_variables)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize
func, initial_params, jac=True, method=method, **scipy_kwargs
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize
callback=callback, **options)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb
f, g = func_and_grad(x)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad
f = fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper
return function(*(wrapper_args + args))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__
fg = self.fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval
loss, grad = _tf_eval(tf.convert_to_tensor(x))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__
result = self._call(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize
*args, **kwds))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in converted code:
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *
loss, grads = _compute_loss_and_gradients(closure, variables)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *
loss = loss_closure()
/path/to/1_model_sim/model.py:354 None *
opt.minimize(lambda: - m.log_marginal_likelihood(),
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *
log_prob = multivariate_normal(Y, m, L)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *
d = x - mu
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function
return constant_op.constant(value, dtype, name=name)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant
allow_broadcast=True)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl
allow_broadcast=allow_broadcast))
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto
"Cannot create a tensor proto whose content is larger than 2GB.")
ValueError: Cannot create a tensor proto whose content is larger than 2GB.
|
ValueError
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
mean_function: Optional[MeanFunction] = None,
noise_variance: float = 1.0,
):
likelihood = gpflow.likelihoods.Gaussian(noise_variance)
_, Y_data = data
super().__init__(kernel, likelihood, mean_function, num_latent_gps=Y_data.shape[-1])
self.data = data_input_to_tensor(data)
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
mean_function: Optional[MeanFunction] = None,
noise_variance: float = 1.0,
):
likelihood = gpflow.likelihoods.Gaussian(noise_variance)
_, Y_data = data
super().__init__(kernel, likelihood, mean_function, num_latent_gps=Y_data.shape[-1])
self.data = data
|
[{'piece_type': 'other', 'piece_content': 'ValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "main.py", line 177, in <module>\\nmain(args)\\nFile "main.py", line 64, in main\\nbuild_allele(args)\\nFile "/path/to/1_model_sim/drivers.py", line 226, in build_allele\\nopt_model_list(m)\\nFile "/path/to/1_model_sim/model.py", line 355, in opt_model_list\\nm.trainable_variables)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize\\nfunc, initial_params, jac=True, method=method, **scipy_kwargs\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize\\ncallback=callback, **options)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb\\nf, g = func_and_grad(x)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad\\nf = fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper\\nreturn function(*(wrapper_args + args))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__\\nfg = self.fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval\\nloss, grad = _tf_eval(tf.convert_to_tensor(x))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__\\nresult = self._call(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call\\nself._initialize(args, kwds, add_initializers_to=initializers)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize\\n*args, **kwds))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected\\ngraph_function, _, _ = self._maybe_define_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function\\ngraph_function = self._create_graph_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function\\ncapture_by_value=self._capture_by_value),\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func\\nfunc_outputs = python_func(*func_args, **func_kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn\\nreturn weak_wrapped_fn().__wrapped__(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper\\nraise e.ag_error_metadata.to_exception(e)\\nValueError: in converted code:\\n\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *\\nloss, grads = _compute_loss_and_gradients(closure, variables)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *\\nloss = loss_closure()\\n/path/to/1_model_sim/model.py:354 None *\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *\\nlog_prob = multivariate_normal(Y, m, L)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *\\nd = x - mu\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper\\nx = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor\\nret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function\\nreturn constant_op.constant(value, dtype, name=name)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant\\nallow_broadcast=True)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl\\nallow_broadcast=allow_broadcast))\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto\\n"Cannot create a tensor proto whose content is larger than 2GB.")\\n\\nValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'other', 'piece_content': 'm = gpf.models.GPR(data=(X, Y),\\nkernel=gpf.kernels.Exponential(active_dims = [0,1]),\\nmean_function=None)\\n\\nopt = gpf.optimizers.Scipy()\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\nm.trainable_variables)'}]
|
Traceback (most recent call last):
File "main.py", line 177, in <module>
main(args)
File "main.py", line 64, in main
build_allele(args)
File "/path/to/1_model_sim/drivers.py", line 226, in build_allele
opt_model_list(m)
File "/path/to/1_model_sim/model.py", line 355, in opt_model_list
m.trainable_variables)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize
func, initial_params, jac=True, method=method, **scipy_kwargs
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize
callback=callback, **options)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb
f, g = func_and_grad(x)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad
f = fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper
return function(*(wrapper_args + args))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__
fg = self.fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval
loss, grad = _tf_eval(tf.convert_to_tensor(x))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__
result = self._call(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize
*args, **kwds))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in converted code:
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *
loss, grads = _compute_loss_and_gradients(closure, variables)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *
loss = loss_closure()
/path/to/1_model_sim/model.py:354 None *
opt.minimize(lambda: - m.log_marginal_likelihood(),
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *
log_prob = multivariate_normal(Y, m, L)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *
d = x - mu
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function
return constant_op.constant(value, dtype, name=name)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant
allow_broadcast=True)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl
allow_broadcast=allow_broadcast))
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto
"Cannot create a tensor proto whose content is larger than 2GB.")
ValueError: Cannot create a tensor proto whose content is larger than 2GB.
|
ValueError
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
inducing_variable: Optional[InducingPoints] = None,
):
"""
data is a tuple of X, Y with X, a data matrix, size [N, D] and Y, a data matrix, size [N, R]
Z is a data matrix, of inducing inputs, size [M, D]
kernel, likelihood, mean_function are appropriate GPflow objects
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps=num_latent_gps)
self.data = data_input_to_tensor(data)
self.num_data = data[0].shape[0]
self.inducing_variable = inducingpoint_wrapper(inducing_variable)
self.V = Parameter(np.zeros((len(self.inducing_variable), self.num_latent_gps)))
self.V.prior = tfp.distributions.Normal(
loc=to_default_float(0.0), scale=to_default_float(1.0)
)
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
inducing_variable: Optional[InducingPoints] = None,
):
"""
data is a tuple of X, Y with X, a data matrix, size [N, D] and Y, a data matrix, size [N, R]
Z is a data matrix, of inducing inputs, size [M, D]
kernel, likelihood, mean_function are appropriate GPflow objects
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps=num_latent_gps)
self.data = data
self.num_data = data[0].shape[0]
self.inducing_variable = inducingpoint_wrapper(inducing_variable)
self.V = Parameter(np.zeros((len(self.inducing_variable), self.num_latent_gps)))
self.V.prior = tfp.distributions.Normal(
loc=to_default_float(0.0), scale=to_default_float(1.0)
)
|
[{'piece_type': 'other', 'piece_content': 'ValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "main.py", line 177, in <module>\\nmain(args)\\nFile "main.py", line 64, in main\\nbuild_allele(args)\\nFile "/path/to/1_model_sim/drivers.py", line 226, in build_allele\\nopt_model_list(m)\\nFile "/path/to/1_model_sim/model.py", line 355, in opt_model_list\\nm.trainable_variables)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize\\nfunc, initial_params, jac=True, method=method, **scipy_kwargs\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize\\ncallback=callback, **options)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb\\nf, g = func_and_grad(x)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad\\nf = fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper\\nreturn function(*(wrapper_args + args))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__\\nfg = self.fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval\\nloss, grad = _tf_eval(tf.convert_to_tensor(x))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__\\nresult = self._call(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call\\nself._initialize(args, kwds, add_initializers_to=initializers)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize\\n*args, **kwds))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected\\ngraph_function, _, _ = self._maybe_define_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function\\ngraph_function = self._create_graph_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function\\ncapture_by_value=self._capture_by_value),\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func\\nfunc_outputs = python_func(*func_args, **func_kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn\\nreturn weak_wrapped_fn().__wrapped__(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper\\nraise e.ag_error_metadata.to_exception(e)\\nValueError: in converted code:\\n\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *\\nloss, grads = _compute_loss_and_gradients(closure, variables)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *\\nloss = loss_closure()\\n/path/to/1_model_sim/model.py:354 None *\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *\\nlog_prob = multivariate_normal(Y, m, L)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *\\nd = x - mu\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper\\nx = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor\\nret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function\\nreturn constant_op.constant(value, dtype, name=name)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant\\nallow_broadcast=True)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl\\nallow_broadcast=allow_broadcast))\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto\\n"Cannot create a tensor proto whose content is larger than 2GB.")\\n\\nValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'other', 'piece_content': 'm = gpf.models.GPR(data=(X, Y),\\nkernel=gpf.kernels.Exponential(active_dims = [0,1]),\\nmean_function=None)\\n\\nopt = gpf.optimizers.Scipy()\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\nm.trainable_variables)'}]
|
Traceback (most recent call last):
File "main.py", line 177, in <module>
main(args)
File "main.py", line 64, in main
build_allele(args)
File "/path/to/1_model_sim/drivers.py", line 226, in build_allele
opt_model_list(m)
File "/path/to/1_model_sim/model.py", line 355, in opt_model_list
m.trainable_variables)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize
func, initial_params, jac=True, method=method, **scipy_kwargs
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize
callback=callback, **options)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb
f, g = func_and_grad(x)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad
f = fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper
return function(*(wrapper_args + args))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__
fg = self.fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval
loss, grad = _tf_eval(tf.convert_to_tensor(x))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__
result = self._call(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize
*args, **kwds))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in converted code:
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *
loss, grads = _compute_loss_and_gradients(closure, variables)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *
loss = loss_closure()
/path/to/1_model_sim/model.py:354 None *
opt.minimize(lambda: - m.log_marginal_likelihood(),
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *
log_prob = multivariate_normal(Y, m, L)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *
d = x - mu
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function
return constant_op.constant(value, dtype, name=name)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant
allow_broadcast=True)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl
allow_broadcast=allow_broadcast))
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto
"Cannot create a tensor proto whose content is larger than 2GB.")
ValueError: Cannot create a tensor proto whose content is larger than 2GB.
|
ValueError
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
inducing_variable: InducingPoints,
*,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
noise_variance: float = 1.0,
):
"""
`data`: a tuple of (X, Y), where the inputs X has shape [N, D]
and the outputs Y has shape [N, R].
`inducing_variable`: an InducingPoints instance or a matrix of
the pseudo inputs Z, of shape [M, D].
`kernel`, `mean_function` are appropriate GPflow objects
This method only works with a Gaussian likelihood, its variance is
initialized to `noise_variance`.
"""
likelihood = likelihoods.Gaussian(noise_variance)
X_data, Y_data = data_input_to_tensor(data)
num_latent_gps = Y_data.shape[-1] if num_latent_gps is None else num_latent_gps
super().__init__(kernel, likelihood, mean_function, num_latent_gps=num_latent_gps)
self.data = X_data, Y_data
self.num_data = X_data.shape[0]
self.inducing_variable = inducingpoint_wrapper(inducing_variable)
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
inducing_variable: InducingPoints,
*,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
noise_variance: float = 1.0,
):
"""
`data`: a tuple of (X, Y), where the inputs X has shape [N, D]
and the outputs Y has shape [N, R].
`inducing_variable`: an InducingPoints instance or a matrix of
the pseudo inputs Z, of shape [M, D].
`kernel`, `mean_function` are appropriate GPflow objects
This method only works with a Gaussian likelihood, its variance is
initialized to `noise_variance`.
"""
likelihood = likelihoods.Gaussian(noise_variance)
X_data, Y_data = data
num_latent_gps = Y_data.shape[-1] if num_latent_gps is None else num_latent_gps
super().__init__(kernel, likelihood, mean_function, num_latent_gps=num_latent_gps)
self.data = data
self.num_data = X_data.shape[0]
self.inducing_variable = inducingpoint_wrapper(inducing_variable)
|
[{'piece_type': 'other', 'piece_content': 'ValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "main.py", line 177, in <module>\\nmain(args)\\nFile "main.py", line 64, in main\\nbuild_allele(args)\\nFile "/path/to/1_model_sim/drivers.py", line 226, in build_allele\\nopt_model_list(m)\\nFile "/path/to/1_model_sim/model.py", line 355, in opt_model_list\\nm.trainable_variables)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize\\nfunc, initial_params, jac=True, method=method, **scipy_kwargs\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize\\ncallback=callback, **options)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb\\nf, g = func_and_grad(x)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad\\nf = fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper\\nreturn function(*(wrapper_args + args))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__\\nfg = self.fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval\\nloss, grad = _tf_eval(tf.convert_to_tensor(x))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__\\nresult = self._call(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call\\nself._initialize(args, kwds, add_initializers_to=initializers)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize\\n*args, **kwds))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected\\ngraph_function, _, _ = self._maybe_define_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function\\ngraph_function = self._create_graph_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function\\ncapture_by_value=self._capture_by_value),\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func\\nfunc_outputs = python_func(*func_args, **func_kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn\\nreturn weak_wrapped_fn().__wrapped__(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper\\nraise e.ag_error_metadata.to_exception(e)\\nValueError: in converted code:\\n\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *\\nloss, grads = _compute_loss_and_gradients(closure, variables)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *\\nloss = loss_closure()\\n/path/to/1_model_sim/model.py:354 None *\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *\\nlog_prob = multivariate_normal(Y, m, L)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *\\nd = x - mu\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper\\nx = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor\\nret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function\\nreturn constant_op.constant(value, dtype, name=name)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant\\nallow_broadcast=True)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl\\nallow_broadcast=allow_broadcast))\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto\\n"Cannot create a tensor proto whose content is larger than 2GB.")\\n\\nValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'other', 'piece_content': 'm = gpf.models.GPR(data=(X, Y),\\nkernel=gpf.kernels.Exponential(active_dims = [0,1]),\\nmean_function=None)\\n\\nopt = gpf.optimizers.Scipy()\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\nm.trainable_variables)'}]
|
Traceback (most recent call last):
File "main.py", line 177, in <module>
main(args)
File "main.py", line 64, in main
build_allele(args)
File "/path/to/1_model_sim/drivers.py", line 226, in build_allele
opt_model_list(m)
File "/path/to/1_model_sim/model.py", line 355, in opt_model_list
m.trainable_variables)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize
func, initial_params, jac=True, method=method, **scipy_kwargs
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize
callback=callback, **options)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb
f, g = func_and_grad(x)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad
f = fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper
return function(*(wrapper_args + args))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__
fg = self.fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval
loss, grad = _tf_eval(tf.convert_to_tensor(x))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__
result = self._call(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize
*args, **kwds))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in converted code:
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *
loss, grads = _compute_loss_and_gradients(closure, variables)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *
loss = loss_closure()
/path/to/1_model_sim/model.py:354 None *
opt.minimize(lambda: - m.log_marginal_likelihood(),
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *
log_prob = multivariate_normal(Y, m, L)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *
d = x - mu
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function
return constant_op.constant(value, dtype, name=name)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant
allow_broadcast=True)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl
allow_broadcast=allow_broadcast))
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto
"Cannot create a tensor proto whose content is larger than 2GB.")
ValueError: Cannot create a tensor proto whose content is larger than 2GB.
|
ValueError
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
):
"""
data = (X, Y) contains the input points [N, D] and the observations [N, P]
kernel, likelihood, mean_function are appropriate GPflow objects
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps)
self.data = data_input_to_tensor(data)
X_data, Y_data = self.data
num_data = X_data.shape[0]
self.num_data = num_data
self.q_mu = Parameter(np.zeros((num_data, self.num_latent_gps)))
q_sqrt = np.array([np.eye(num_data) for _ in range(self.num_latent_gps)])
self.q_sqrt = Parameter(q_sqrt, transform=triangular())
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
):
"""
data = (X, Y) contains the input points [N, D] and the observations [N, P]
kernel, likelihood, mean_function are appropriate GPflow objects
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps)
X_data, Y_data = data
num_data = X_data.shape[0]
self.num_data = num_data
self.data = data
self.q_mu = Parameter(np.zeros((num_data, self.num_latent_gps)))
q_sqrt = np.array([np.eye(num_data) for _ in range(self.num_latent_gps)])
self.q_sqrt = Parameter(q_sqrt, transform=triangular())
|
[{'piece_type': 'other', 'piece_content': 'ValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "main.py", line 177, in <module>\\nmain(args)\\nFile "main.py", line 64, in main\\nbuild_allele(args)\\nFile "/path/to/1_model_sim/drivers.py", line 226, in build_allele\\nopt_model_list(m)\\nFile "/path/to/1_model_sim/model.py", line 355, in opt_model_list\\nm.trainable_variables)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize\\nfunc, initial_params, jac=True, method=method, **scipy_kwargs\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize\\ncallback=callback, **options)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb\\nf, g = func_and_grad(x)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad\\nf = fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper\\nreturn function(*(wrapper_args + args))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__\\nfg = self.fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval\\nloss, grad = _tf_eval(tf.convert_to_tensor(x))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__\\nresult = self._call(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call\\nself._initialize(args, kwds, add_initializers_to=initializers)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize\\n*args, **kwds))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected\\ngraph_function, _, _ = self._maybe_define_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function\\ngraph_function = self._create_graph_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function\\ncapture_by_value=self._capture_by_value),\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func\\nfunc_outputs = python_func(*func_args, **func_kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn\\nreturn weak_wrapped_fn().__wrapped__(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper\\nraise e.ag_error_metadata.to_exception(e)\\nValueError: in converted code:\\n\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *\\nloss, grads = _compute_loss_and_gradients(closure, variables)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *\\nloss = loss_closure()\\n/path/to/1_model_sim/model.py:354 None *\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *\\nlog_prob = multivariate_normal(Y, m, L)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *\\nd = x - mu\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper\\nx = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor\\nret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function\\nreturn constant_op.constant(value, dtype, name=name)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant\\nallow_broadcast=True)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl\\nallow_broadcast=allow_broadcast))\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto\\n"Cannot create a tensor proto whose content is larger than 2GB.")\\n\\nValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'other', 'piece_content': 'm = gpf.models.GPR(data=(X, Y),\\nkernel=gpf.kernels.Exponential(active_dims = [0,1]),\\nmean_function=None)\\n\\nopt = gpf.optimizers.Scipy()\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\nm.trainable_variables)'}]
|
Traceback (most recent call last):
File "main.py", line 177, in <module>
main(args)
File "main.py", line 64, in main
build_allele(args)
File "/path/to/1_model_sim/drivers.py", line 226, in build_allele
opt_model_list(m)
File "/path/to/1_model_sim/model.py", line 355, in opt_model_list
m.trainable_variables)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize
func, initial_params, jac=True, method=method, **scipy_kwargs
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize
callback=callback, **options)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb
f, g = func_and_grad(x)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad
f = fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper
return function(*(wrapper_args + args))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__
fg = self.fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval
loss, grad = _tf_eval(tf.convert_to_tensor(x))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__
result = self._call(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize
*args, **kwds))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in converted code:
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *
loss, grads = _compute_loss_and_gradients(closure, variables)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *
loss = loss_closure()
/path/to/1_model_sim/model.py:354 None *
opt.minimize(lambda: - m.log_marginal_likelihood(),
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *
log_prob = multivariate_normal(Y, m, L)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *
d = x - mu
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function
return constant_op.constant(value, dtype, name=name)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant
allow_broadcast=True)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl
allow_broadcast=allow_broadcast))
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto
"Cannot create a tensor proto whose content is larger than 2GB.")
ValueError: Cannot create a tensor proto whose content is larger than 2GB.
|
ValueError
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
):
"""
data = (X, Y) contains the input points [N, D] and the observations [N, P]
kernel, likelihood, mean_function are appropriate GPflow objects
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps)
self.data = data_input_to_tensor(data)
X_data, Y_data = self.data
self.num_data = X_data.shape[0]
self.q_alpha = Parameter(np.zeros((self.num_data, self.num_latent_gps)))
self.q_lambda = Parameter(
np.ones((self.num_data, self.num_latent_gps)), transform=gpflow.utilities.positive()
)
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
):
"""
data = (X, Y) contains the input points [N, D] and the observations [N, P]
kernel, likelihood, mean_function are appropriate GPflow objects
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps)
X_data, Y_data = data
self.data = data
self.num_data = X_data.shape[0]
self.q_alpha = Parameter(np.zeros((self.num_data, self.num_latent_gps)))
self.q_lambda = Parameter(
np.ones((self.num_data, self.num_latent_gps)), transform=gpflow.utilities.positive()
)
|
[{'piece_type': 'other', 'piece_content': 'ValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "main.py", line 177, in <module>\\nmain(args)\\nFile "main.py", line 64, in main\\nbuild_allele(args)\\nFile "/path/to/1_model_sim/drivers.py", line 226, in build_allele\\nopt_model_list(m)\\nFile "/path/to/1_model_sim/model.py", line 355, in opt_model_list\\nm.trainable_variables)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize\\nfunc, initial_params, jac=True, method=method, **scipy_kwargs\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize\\ncallback=callback, **options)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb\\nf, g = func_and_grad(x)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad\\nf = fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper\\nreturn function(*(wrapper_args + args))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__\\nfg = self.fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval\\nloss, grad = _tf_eval(tf.convert_to_tensor(x))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__\\nresult = self._call(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call\\nself._initialize(args, kwds, add_initializers_to=initializers)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize\\n*args, **kwds))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected\\ngraph_function, _, _ = self._maybe_define_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function\\ngraph_function = self._create_graph_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function\\ncapture_by_value=self._capture_by_value),\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func\\nfunc_outputs = python_func(*func_args, **func_kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn\\nreturn weak_wrapped_fn().__wrapped__(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper\\nraise e.ag_error_metadata.to_exception(e)\\nValueError: in converted code:\\n\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *\\nloss, grads = _compute_loss_and_gradients(closure, variables)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *\\nloss = loss_closure()\\n/path/to/1_model_sim/model.py:354 None *\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *\\nlog_prob = multivariate_normal(Y, m, L)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *\\nd = x - mu\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper\\nx = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor\\nret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function\\nreturn constant_op.constant(value, dtype, name=name)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant\\nallow_broadcast=True)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl\\nallow_broadcast=allow_broadcast))\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto\\n"Cannot create a tensor proto whose content is larger than 2GB.")\\n\\nValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'other', 'piece_content': 'm = gpf.models.GPR(data=(X, Y),\\nkernel=gpf.kernels.Exponential(active_dims = [0,1]),\\nmean_function=None)\\n\\nopt = gpf.optimizers.Scipy()\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\nm.trainable_variables)'}]
|
Traceback (most recent call last):
File "main.py", line 177, in <module>
main(args)
File "main.py", line 64, in main
build_allele(args)
File "/path/to/1_model_sim/drivers.py", line 226, in build_allele
opt_model_list(m)
File "/path/to/1_model_sim/model.py", line 355, in opt_model_list
m.trainable_variables)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize
func, initial_params, jac=True, method=method, **scipy_kwargs
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize
callback=callback, **options)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb
f, g = func_and_grad(x)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad
f = fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper
return function(*(wrapper_args + args))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__
fg = self.fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval
loss, grad = _tf_eval(tf.convert_to_tensor(x))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__
result = self._call(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize
*args, **kwds))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in converted code:
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *
loss, grads = _compute_loss_and_gradients(closure, variables)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *
loss = loss_closure()
/path/to/1_model_sim/model.py:354 None *
opt.minimize(lambda: - m.log_marginal_likelihood(),
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *
log_prob = multivariate_normal(Y, m, L)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *
d = x - mu
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function
return constant_op.constant(value, dtype, name=name)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant
allow_broadcast=True)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl
allow_broadcast=allow_broadcast))
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto
"Cannot create a tensor proto whose content is larger than 2GB.")
ValueError: Cannot create a tensor proto whose content is larger than 2GB.
|
ValueError
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
):
"""
data is a tuple of X, Y with X, a data matrix, size [N, D] and Y, a data matrix, size [N, R]
kernel, likelihood, mean_function are appropriate GPflow objects
This is a vanilla implementation of a GP with a non-Gaussian
likelihood. The latent function values are represented by centered
(whitened) variables, so
v ~ N(0, I)
f = Lv + m(x)
with
L L^T = K
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps)
self.data = data_input_to_tensor(data)
self.num_data = self.data[0].shape[0]
self.V = Parameter(np.zeros((self.num_data, self.num_latent_gps)))
self.V.prior = tfp.distributions.Normal(
loc=to_default_float(0.0), scale=to_default_float(1.0)
)
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
):
"""
data is a tuple of X, Y with X, a data matrix, size [N, D] and Y, a data matrix, size [N, R]
kernel, likelihood, mean_function are appropriate GPflow objects
This is a vanilla implementation of a GP with a non-Gaussian
likelihood. The latent function values are represented by centered
(whitened) variables, so
v ~ N(0, I)
f = Lv + m(x)
with
L L^T = K
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps)
self.data = data
self.num_data = data[0].shape[0]
self.V = Parameter(np.zeros((self.num_data, self.num_latent_gps)))
self.V.prior = tfp.distributions.Normal(
loc=to_default_float(0.0), scale=to_default_float(1.0)
)
|
[{'piece_type': 'other', 'piece_content': 'ValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "main.py", line 177, in <module>\\nmain(args)\\nFile "main.py", line 64, in main\\nbuild_allele(args)\\nFile "/path/to/1_model_sim/drivers.py", line 226, in build_allele\\nopt_model_list(m)\\nFile "/path/to/1_model_sim/model.py", line 355, in opt_model_list\\nm.trainable_variables)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize\\nfunc, initial_params, jac=True, method=method, **scipy_kwargs\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize\\ncallback=callback, **options)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb\\nf, g = func_and_grad(x)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad\\nf = fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper\\nreturn function(*(wrapper_args + args))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__\\nfg = self.fun(x, *args)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval\\nloss, grad = _tf_eval(tf.convert_to_tensor(x))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__\\nresult = self._call(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call\\nself._initialize(args, kwds, add_initializers_to=initializers)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize\\n*args, **kwds))\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected\\ngraph_function, _, _ = self._maybe_define_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function\\ngraph_function = self._create_graph_function(args, kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function\\ncapture_by_value=self._capture_by_value),\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func\\nfunc_outputs = python_func(*func_args, **func_kwargs)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn\\nreturn weak_wrapped_fn().__wrapped__(*args, **kwds)\\nFile "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper\\nraise e.ag_error_metadata.to_exception(e)\\nValueError: in converted code:\\n\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *\\nloss, grads = _compute_loss_and_gradients(closure, variables)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *\\nloss = loss_closure()\\n/path/to/1_model_sim/model.py:354 None *\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *\\nlog_prob = multivariate_normal(Y, m, L)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *\\nd = x - mu\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper\\nx = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor\\nret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function\\nreturn constant_op.constant(value, dtype, name=name)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant\\nallow_broadcast=True)\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl\\nallow_broadcast=allow_broadcast))\\n/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto\\n"Cannot create a tensor proto whose content is larger than 2GB.")\\n\\nValueError: Cannot create a tensor proto whose content is larger than 2GB.'}, {'piece_type': 'other', 'piece_content': 'm = gpf.models.GPR(data=(X, Y),\\nkernel=gpf.kernels.Exponential(active_dims = [0,1]),\\nmean_function=None)\\n\\nopt = gpf.optimizers.Scipy()\\nopt.minimize(lambda: - m.log_marginal_likelihood(),\\nm.trainable_variables)'}]
|
Traceback (most recent call last):
File "main.py", line 177, in <module>
main(args)
File "main.py", line 64, in main
build_allele(args)
File "/path/to/1_model_sim/drivers.py", line 226, in build_allele
opt_model_list(m)
File "/path/to/1_model_sim/model.py", line 355, in opt_model_list
m.trainable_variables)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize
func, initial_params, jac=True, method=method, **scipy_kwargs
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize
callback=callback, **options)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb
f, g = func_and_grad(x)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad
f = fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper
return function(*(wrapper_args + args))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__
fg = self.fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval
loss, grad = _tf_eval(tf.convert_to_tensor(x))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__
result = self._call(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize
*args, **kwds))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in converted code:
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *
loss, grads = _compute_loss_and_gradients(closure, variables)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *
loss = loss_closure()
/path/to/1_model_sim/model.py:354 None *
opt.minimize(lambda: - m.log_marginal_likelihood(),
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *
log_prob = multivariate_normal(Y, m, L)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *
d = x - mu
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function
return constant_op.constant(value, dtype, name=name)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant
allow_broadcast=True)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl
allow_broadcast=allow_broadcast))
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto
"Cannot create a tensor proto whose content is larger than 2GB.")
ValueError: Cannot create a tensor proto whose content is larger than 2GB.
|
ValueError
|
def K(self, X: tf.Tensor, X2: Optional[tf.Tensor] = None) -> tf.Tensor:
sig_X = self._sigmoids(X) # N1 x 1 x Ncp
sig_X2 = self._sigmoids(X2) if X2 is not None else sig_X # N2 x 1 x Ncp
# `starters` are the sigmoids going from 0 -> 1, whilst `stoppers` go
# from 1 -> 0, dimensions are N1 x N2 x Ncp
starters = sig_X * tf.transpose(sig_X2, perm=(1, 0, 2))
stoppers = (1 - sig_X) * tf.transpose((1 - sig_X2), perm=(1, 0, 2))
# prepend `starters` with ones and append ones to `stoppers` since the
# first kernel has no start and the last kernel has no end
N1 = tf.shape(X)[0]
N2 = tf.shape(X2)[0] if X2 is not None else N1
ones = tf.ones((N1, N2, 1), dtype=X.dtype)
starters = tf.concat([ones, starters], axis=2)
stoppers = tf.concat([stoppers, ones], axis=2)
# now combine with the underlying kernels
kernel_stack = tf.stack([k(X, X2) for k in self.kernels], axis=2)
return tf.reduce_sum(kernel_stack * starters * stoppers, axis=2)
|
def K(self, X: tf.Tensor, X2: Optional[tf.Tensor] = None) -> tf.Tensor:
sig_X = self._sigmoids(X) # N x 1 x Ncp
sig_X2 = self._sigmoids(X2) if X2 is not None else sig_X
# `starters` are the sigmoids going from 0 -> 1, whilst `stoppers` go
# from 1 -> 0, dimensions are N x N x Ncp
starters = sig_X * tf.transpose(sig_X2, perm=(1, 0, 2))
stoppers = (1 - sig_X) * tf.transpose((1 - sig_X2), perm=(1, 0, 2))
# prepend `starters` with ones and append ones to `stoppers` since the
# first kernel has no start and the last kernel has no end
N = tf.shape(X)[0]
ones = tf.ones((N, N, 1), dtype=X.dtype)
starters = tf.concat([ones, starters], axis=2)
stoppers = tf.concat([stoppers, ones], axis=2)
# now combine with the underlying kernels
kernel_stack = tf.stack([k(X, X2) for k in self.kernels], axis=2)
return tf.reduce_sum(kernel_stack * starters * stoppers, axis=2)
|
[{'piece_type': 'reproducing source code', 'piece_content': 'import numpy as np\\nimport gpflow\\nX = np.linspace(0,100,100).reshape(100,1)\\nbase_k1 = gpflow.kernels.Matern32(lengthscales=0.2)\\nbase_k2 = gpflow.kernels.Matern32(lengthscales=2.0)\\nk = gpflow.kernels.ChangePoints([base_k1, base_k2], [0.0], steepness=5.0)\\nk(X) # works\\n\\nN = 25 # anything other than N=100 will reproduce the bug\\nxx = np.linspace(0,50,N).reshape(N,1)\\nk(X, xx) # breaks'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nInvalidArgumentError Traceback (most recent call last)\\n<ipython-input-25-d1dbc7941bae> in <module>\\n----> 1 k(X, xx)\\n\\n~/Code/GPflow/gpflow/kernels/base.py in __call__(self, X, X2, full_cov, presliced)\\n170\\n171 else:\\n--> 172 return self.K(X, X2)\\n173\\n174 def __add__(self, other):\\n\\n~/Code/GPflow/gpflow/kernels/changepoints.py in K(self, X, X2)\\n83 N = tf.shape(X)[0]\\n84 ones = tf.ones((N, N, 1), dtype=X.dtype)\\n---> 85 starters = tf.concat([ones, starters], axis=2)\\n86 stoppers = tf.concat([stoppers, ones], axis=2)\\n87\\n\\n~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/tensorflow_core/python/util/dispatch.py in wrapper(*args, **kwargs)\\n178 """Call target, and fall back on dispatchers if there is a TypeError."""\\n179 try:\\n--> 180 return target(*args, **kwargs)\\n181 except (TypeError, ValueError):\\n182 # Note: convert_to_eager_tensor currently raises a ValueError, not a\\n\\n~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/tensorflow_core/python/ops/array_ops.py in concat(values, axis, name)\\n1515 dtype=dtypes.int32).get_shape().assert_has_rank(0)\\n1516 return identity(values[0], name=name)\\n-> 1517 return gen_array_ops.concat_v2(values=values, axis=axis, name=name)\\n1518\\n1519\\n\\n~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/tensorflow_core/python/ops/gen_array_ops.py in concat_v2(values, axis, name)\\n1116 pass # Add nodes to the TensorFlow graph.\\n1117 except _core._NotOkStatusException as e:\\n-> 1118 _ops.raise_from_not_ok_status(e, name)\\n1119 # Add nodes to the TensorFlow graph.\\n1120 if not isinstance(values, (list, tuple)):\\n\\n~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/tensorflow_core/python/framework/ops.py in raise_from_not_ok_status(e, name)\\n6604 message = e.message + (" name: " + name if name is not None else "")\\n6605 # pylint: disable=protected-access\\n-> 6606 six.raise_from(core._status_to_exception(e.code, message), None)\\n6607 # pylint: enable=protected-access\\n6608\\n\\n~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/six.py in raise_from(value, from_value)\\n\\nInvalidArgumentError: ConcatOp : Dimensions of inputs should match: shape[0] = [100,100,1] vs. shape[1] = [100,25,1] [Op:ConcatV2] name: concat'}, {'piece_type': 'source code', 'piece_content': 'def K(self, X: tf.Tensor, X2: Optional[tf.Tensor] = None) -> tf.Tensor:\\nsig_X = self._sigmoids(X) # N x 1 x Ncp\\nsig_X2 = self._sigmoids(X2) if X2 is not None else sig_X\\n\\n# `starters` are the sigmoids going from 0 -> 1, whilst `stoppers` go\\n# from 1 -> 0, dimensions are N x N x Ncp\\nstarters = sig_X * tf.transpose(sig_X2, perm=(1, 0, 2))\\nstoppers = (1 - sig_X) * tf.transpose((1 - sig_X2), perm=(1, 0, 2))\\n\\n# prepend `starters` with ones and append ones to `stoppers` since the\\n# first kernel has no start and the last kernel has no end\\nN = tf.shape(X)[0]\\nM = tf.shape(X2)[0] if X2 is not None else N # THIS IS THE FIX\\nones = tf.ones((N, M, 1), dtype=X.dtype) #PREVIOUSLY N WAS IN PLACE OF M HERE\\nstarters = tf.concat([ones, starters], axis=2)\\nstoppers = tf.concat([stoppers, ones], axis=2)'}]
|
---------------------------------------------------------------------------
InvalidArgumentError Traceback (most recent call last)
<ipython-input-25-d1dbc7941bae> in <module>
----> 1 k(X, xx)
~/Code/GPflow/gpflow/kernels/base.py in __call__(self, X, X2, full_cov, presliced)
170
171 else:
--> 172 return self.K(X, X2)
173
174 def __add__(self, other):
~/Code/GPflow/gpflow/kernels/changepoints.py in K(self, X, X2)
83 N = tf.shape(X)[0]
84 ones = tf.ones((N, N, 1), dtype=X.dtype)
---> 85 starters = tf.concat([ones, starters], axis=2)
86 stoppers = tf.concat([stoppers, ones], axis=2)
87
~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/tensorflow_core/python/util/dispatch.py in wrapper(*args, **kwargs)
178 """Call target, and fall back on dispatchers if there is a TypeError."""
179 try:
--> 180 return target(*args, **kwargs)
181 except (TypeError, ValueError):
182 # Note: convert_to_eager_tensor currently raises a ValueError, not a
~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/tensorflow_core/python/ops/array_ops.py in concat(values, axis, name)
1515 dtype=dtypes.int32).get_shape().assert_has_rank(0)
1516 return identity(values[0], name=name)
-> 1517 return gen_array_ops.concat_v2(values=values, axis=axis, name=name)
1518
1519
~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/tensorflow_core/python/ops/gen_array_ops.py in concat_v2(values, axis, name)
1116 pass # Add nodes to the TensorFlow graph.
1117 except _core._NotOkStatusException as e:
-> 1118 _ops.raise_from_not_ok_status(e, name)
1119 # Add nodes to the TensorFlow graph.
1120 if not isinstance(values, (list, tuple)):
~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/tensorflow_core/python/framework/ops.py in raise_from_not_ok_status(e, name)
6604 message = e.message + (" name: " + name if name is not None else "")
6605 # pylint: disable=protected-access
-> 6606 six.raise_from(core._status_to_exception(e.code, message), None)
6607 # pylint: enable=protected-access
6608
~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/six.py in raise_from(value, from_value)
InvalidArgumentError: ConcatOp : Dimensions of inputs should match: shape[0] = [100,100,1] vs. shape[1] = [100,25,1] [Op:ConcatV2] name: concat
|
InvalidArgumentError
|
def autoflow(*af_args, **af_kwargs):
def autoflow_wrapper(method):
@functools.wraps(method)
def runnable(obj, *args, **kwargs):
if not isinstance(obj, Node):
raise GPflowError(
'AutoFlow works only with node-like objects.')
if obj.is_built_coherence(obj.graph) is Build.NO:
raise GPflowError('Not built with "{graph}".'.format(graph=obj.graph))
name = method.__name__
store = AutoFlow.get_autoflow(obj, name)
session = kwargs.pop('session', None)
session = obj.enquire_session(session=session)
scope_name = _name_scope_name(obj, name)
with session.graph.as_default(), tf.name_scope(scope_name):
if not store:
_setup_storage(store, *af_args, **af_kwargs)
_build_method(method, obj, store)
return _session_run(session, obj, store, *args, **kwargs)
return runnable
return autoflow_wrapper
|
def autoflow(*af_args, **af_kwargs):
def autoflow_wrapper(method):
@functools.wraps(method)
def runnable(obj, *args, **kwargs):
if not isinstance(obj, Node):
raise GPflowError(
'AutoFlow works only with node-like objects.')
if obj.is_built_coherence(obj.graph) is Build.NO:
raise GPflowError('Not built with "{graph}".'.format(graph=obj.graph))
name = method.__name__
store = AutoFlow.get_autoflow(obj, name)
session = kwargs.pop('session', None)
session = obj.enquire_session(session=session)
if not store:
scope_name = _name_scope_name(obj, name)
with session.graph.as_default(), tf.name_scope(scope_name):
_setup_storage(store, *af_args, **af_kwargs)
_build_method(method, obj, store)
return _session_run(session, obj, store, *args, **kwargs)
return runnable
return autoflow_wrapper
|
[{'piece_type': 'source code', 'piece_content': 'model = gpflow.models.svgp.SVGP(np.random.randn(1, 1),\\nnp.random.randn(1, 1),\\ngpflow.kernels.RBF(1),\\ngpflow.likelihoods.Gaussian(),\\nnp.random.randn(1, 1),\\nminibatch_size=1)\\nmodel.compute_log_likelihood()'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-3-e3c07a8fceb7> in <module>()\\n10 np.random.randn(M, D),\\n11 minibatch_size=2)\\n---> 12 model.compute_log_likelihood()\\n\\n/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)\\n152 _setup_storage(store, *af_args, **af_kwargs)\\n153 _build_method(method, obj, store)\\n--> 154 return _session_run(session, obj, store, *args, **kwargs)\\n155 return runnable\\n156 return autoflow_wrapper\\n\\n/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)\\n189 feed_dict.update(obj.feeds)\\n190 initialize = kwargs.pop(\\'initialize\\', False)\\n--> 191 obj.initialize(session=session, force=initialize)\\n192 return session.run(store[\\'result\\'], **kwargs)\\n193\\n\\n/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)\\n82 session=session,\\n83 force=force,\\n---> 84 feed_dict=self.initializable_feeds)\\n85\\n86 def clear(self):\\n\\n/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)\\n82 initializer = tf.variables_initializer(variables)\\n83 else:\\n---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)\\n85 def uninitialized_names():\\n86 for uv in session.run(uninitialized):\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)\\n105 """\\n106 def wrapped(*args, **kwargs):\\n--> 107 return _add_should_use_warning(fn(*args, **kwargs))\\n108 return tf_decorator.make_decorator(\\n109 fn, wrapped, \\'should_use_result\\',\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)\\n182 A `Tensor` of type `bool`.\\n183 """\\n--> 184 if ref.dtype._is_ref_dtype:\\n185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)\\n186 # Handle resource variables.\\n\\nAttributeError: \\'Iterator\\' object has no attribute \\'dtype\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def autoflow_wrapper(method):
@functools.wraps(method)
def runnable(obj, *args, **kwargs):
if not isinstance(obj, Node):
raise GPflowError(
'AutoFlow works only with node-like objects.')
if obj.is_built_coherence(obj.graph) is Build.NO:
raise GPflowError('Not built with "{graph}".'.format(graph=obj.graph))
name = method.__name__
store = AutoFlow.get_autoflow(obj, name)
session = kwargs.pop('session', None)
session = obj.enquire_session(session=session)
scope_name = _name_scope_name(obj, name)
with session.graph.as_default(), tf.name_scope(scope_name):
if not store:
_setup_storage(store, *af_args, **af_kwargs)
_build_method(method, obj, store)
return _session_run(session, obj, store, *args, **kwargs)
return runnable
|
def autoflow_wrapper(method):
@functools.wraps(method)
def runnable(obj, *args, **kwargs):
if not isinstance(obj, Node):
raise GPflowError(
'AutoFlow works only with node-like objects.')
if obj.is_built_coherence(obj.graph) is Build.NO:
raise GPflowError('Not built with "{graph}".'.format(graph=obj.graph))
name = method.__name__
store = AutoFlow.get_autoflow(obj, name)
session = kwargs.pop('session', None)
session = obj.enquire_session(session=session)
if not store:
scope_name = _name_scope_name(obj, name)
with session.graph.as_default(), tf.name_scope(scope_name):
_setup_storage(store, *af_args, **af_kwargs)
_build_method(method, obj, store)
return _session_run(session, obj, store, *args, **kwargs)
return runnable
|
[{'piece_type': 'source code', 'piece_content': 'model = gpflow.models.svgp.SVGP(np.random.randn(1, 1),\\nnp.random.randn(1, 1),\\ngpflow.kernels.RBF(1),\\ngpflow.likelihoods.Gaussian(),\\nnp.random.randn(1, 1),\\nminibatch_size=1)\\nmodel.compute_log_likelihood()'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-3-e3c07a8fceb7> in <module>()\\n10 np.random.randn(M, D),\\n11 minibatch_size=2)\\n---> 12 model.compute_log_likelihood()\\n\\n/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)\\n152 _setup_storage(store, *af_args, **af_kwargs)\\n153 _build_method(method, obj, store)\\n--> 154 return _session_run(session, obj, store, *args, **kwargs)\\n155 return runnable\\n156 return autoflow_wrapper\\n\\n/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)\\n189 feed_dict.update(obj.feeds)\\n190 initialize = kwargs.pop(\\'initialize\\', False)\\n--> 191 obj.initialize(session=session, force=initialize)\\n192 return session.run(store[\\'result\\'], **kwargs)\\n193\\n\\n/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)\\n82 session=session,\\n83 force=force,\\n---> 84 feed_dict=self.initializable_feeds)\\n85\\n86 def clear(self):\\n\\n/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)\\n82 initializer = tf.variables_initializer(variables)\\n83 else:\\n---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)\\n85 def uninitialized_names():\\n86 for uv in session.run(uninitialized):\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)\\n105 """\\n106 def wrapped(*args, **kwargs):\\n--> 107 return _add_should_use_warning(fn(*args, **kwargs))\\n108 return tf_decorator.make_decorator(\\n109 fn, wrapped, \\'should_use_result\\',\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)\\n182 A `Tensor` of type `bool`.\\n183 """\\n--> 184 if ref.dtype._is_ref_dtype:\\n185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)\\n186 # Handle resource variables.\\n\\nAttributeError: \\'Iterator\\' object has no attribute \\'dtype\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def runnable(obj, *args, **kwargs):
if not isinstance(obj, Node):
raise GPflowError(
'AutoFlow works only with node-like objects.')
if obj.is_built_coherence(obj.graph) is Build.NO:
raise GPflowError('Not built with "{graph}".'.format(graph=obj.graph))
name = method.__name__
store = AutoFlow.get_autoflow(obj, name)
session = kwargs.pop('session', None)
session = obj.enquire_session(session=session)
scope_name = _name_scope_name(obj, name)
with session.graph.as_default(), tf.name_scope(scope_name):
if not store:
_setup_storage(store, *af_args, **af_kwargs)
_build_method(method, obj, store)
return _session_run(session, obj, store, *args, **kwargs)
|
def runnable(obj, *args, **kwargs):
if not isinstance(obj, Node):
raise GPflowError(
'AutoFlow works only with node-like objects.')
if obj.is_built_coherence(obj.graph) is Build.NO:
raise GPflowError('Not built with "{graph}".'.format(graph=obj.graph))
name = method.__name__
store = AutoFlow.get_autoflow(obj, name)
session = kwargs.pop('session', None)
session = obj.enquire_session(session=session)
if not store:
scope_name = _name_scope_name(obj, name)
with session.graph.as_default(), tf.name_scope(scope_name):
_setup_storage(store, *af_args, **af_kwargs)
_build_method(method, obj, store)
return _session_run(session, obj, store, *args, **kwargs)
|
[{'piece_type': 'source code', 'piece_content': 'model = gpflow.models.svgp.SVGP(np.random.randn(1, 1),\\nnp.random.randn(1, 1),\\ngpflow.kernels.RBF(1),\\ngpflow.likelihoods.Gaussian(),\\nnp.random.randn(1, 1),\\nminibatch_size=1)\\nmodel.compute_log_likelihood()'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-3-e3c07a8fceb7> in <module>()\\n10 np.random.randn(M, D),\\n11 minibatch_size=2)\\n---> 12 model.compute_log_likelihood()\\n\\n/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)\\n152 _setup_storage(store, *af_args, **af_kwargs)\\n153 _build_method(method, obj, store)\\n--> 154 return _session_run(session, obj, store, *args, **kwargs)\\n155 return runnable\\n156 return autoflow_wrapper\\n\\n/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)\\n189 feed_dict.update(obj.feeds)\\n190 initialize = kwargs.pop(\\'initialize\\', False)\\n--> 191 obj.initialize(session=session, force=initialize)\\n192 return session.run(store[\\'result\\'], **kwargs)\\n193\\n\\n/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)\\n82 session=session,\\n83 force=force,\\n---> 84 feed_dict=self.initializable_feeds)\\n85\\n86 def clear(self):\\n\\n/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)\\n82 initializer = tf.variables_initializer(variables)\\n83 else:\\n---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)\\n85 def uninitialized_names():\\n86 for uv in session.run(uninitialized):\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)\\n105 """\\n106 def wrapped(*args, **kwargs):\\n--> 107 return _add_should_use_warning(fn(*args, **kwargs))\\n108 return tf_decorator.make_decorator(\\n109 fn, wrapped, \\'should_use_result\\',\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)\\n182 A `Tensor` of type `bool`.\\n183 """\\n--> 184 if ref.dtype._is_ref_dtype:\\n185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)\\n186 # Handle resource variables.\\n\\nAttributeError: \\'Iterator\\' object has no attribute \\'dtype\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def initialize_variables(variables=None, session=None, force=False, **run_kwargs):
session = tf.get_default_session() if session is None else session
if variables is None:
initializer = tf.global_variables_initializer()
else:
if force:
vars_for_init = list(_initializable_tensors(variables))
else:
vars_for_init = list(_find_initializable_tensors(variables, session))
if not vars_for_init:
return
initializer = tf.variables_initializer(vars_for_init)
session.run(initializer, **run_kwargs)
|
def initialize_variables(variables=None, session=None, force=False, **run_kwargs):
session = tf.get_default_session() if session is None else session
if variables is None:
initializer = tf.global_variables_initializer()
else:
if force:
initializer = tf.variables_initializer(variables)
else:
uninitialized = tf.report_uninitialized_variables(var_list=variables)
def uninitialized_names():
for uv in session.run(uninitialized):
yield uv.decode('utf-8')
# if isinstance(uv, bytes):
# yield uv.decode('utf-8')
# elif isinstance(uv, str):
# yield uv
# else:
# msg = 'Unknown output type "{}" from `tf.report_uninitialized_variables`'
# raise ValueError(msg.format(type(uv)))
names = set(uninitialized_names())
vars_for_init = [v for v in variables if v.name.split(':')[0] in names]
initializer = tf.variables_initializer(vars_for_init)
session.run(initializer, **run_kwargs)
|
[{'piece_type': 'source code', 'piece_content': 'model = gpflow.models.svgp.SVGP(np.random.randn(1, 1),\\nnp.random.randn(1, 1),\\ngpflow.kernels.RBF(1),\\ngpflow.likelihoods.Gaussian(),\\nnp.random.randn(1, 1),\\nminibatch_size=1)\\nmodel.compute_log_likelihood()'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-3-e3c07a8fceb7> in <module>()\\n10 np.random.randn(M, D),\\n11 minibatch_size=2)\\n---> 12 model.compute_log_likelihood()\\n\\n/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)\\n152 _setup_storage(store, *af_args, **af_kwargs)\\n153 _build_method(method, obj, store)\\n--> 154 return _session_run(session, obj, store, *args, **kwargs)\\n155 return runnable\\n156 return autoflow_wrapper\\n\\n/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)\\n189 feed_dict.update(obj.feeds)\\n190 initialize = kwargs.pop(\\'initialize\\', False)\\n--> 191 obj.initialize(session=session, force=initialize)\\n192 return session.run(store[\\'result\\'], **kwargs)\\n193\\n\\n/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)\\n82 session=session,\\n83 force=force,\\n---> 84 feed_dict=self.initializable_feeds)\\n85\\n86 def clear(self):\\n\\n/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)\\n82 initializer = tf.variables_initializer(variables)\\n83 else:\\n---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)\\n85 def uninitialized_names():\\n86 for uv in session.run(uninitialized):\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)\\n105 """\\n106 def wrapped(*args, **kwargs):\\n--> 107 return _add_should_use_warning(fn(*args, **kwargs))\\n108 return tf_decorator.make_decorator(\\n109 fn, wrapped, \\'should_use_result\\',\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)\\n182 A `Tensor` of type `bool`.\\n183 """\\n--> 184 if ref.dtype._is_ref_dtype:\\n185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)\\n186 # Handle resource variables.\\n\\nAttributeError: \\'Iterator\\' object has no attribute \\'dtype\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def _clear(self):
self._reset_name()
self._initial_value_tensor = None
self._dataholder_tensor = None
self._is_initialized_tensor = None
|
def _clear(self):
self._reset_name()
self._initial_value_tensor = None
self._dataholder_tensor = None
|
[{'piece_type': 'source code', 'piece_content': 'model = gpflow.models.svgp.SVGP(np.random.randn(1, 1),\\nnp.random.randn(1, 1),\\ngpflow.kernels.RBF(1),\\ngpflow.likelihoods.Gaussian(),\\nnp.random.randn(1, 1),\\nminibatch_size=1)\\nmodel.compute_log_likelihood()'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-3-e3c07a8fceb7> in <module>()\\n10 np.random.randn(M, D),\\n11 minibatch_size=2)\\n---> 12 model.compute_log_likelihood()\\n\\n/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)\\n152 _setup_storage(store, *af_args, **af_kwargs)\\n153 _build_method(method, obj, store)\\n--> 154 return _session_run(session, obj, store, *args, **kwargs)\\n155 return runnable\\n156 return autoflow_wrapper\\n\\n/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)\\n189 feed_dict.update(obj.feeds)\\n190 initialize = kwargs.pop(\\'initialize\\', False)\\n--> 191 obj.initialize(session=session, force=initialize)\\n192 return session.run(store[\\'result\\'], **kwargs)\\n193\\n\\n/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)\\n82 session=session,\\n83 force=force,\\n---> 84 feed_dict=self.initializable_feeds)\\n85\\n86 def clear(self):\\n\\n/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)\\n82 initializer = tf.variables_initializer(variables)\\n83 else:\\n---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)\\n85 def uninitialized_names():\\n86 for uv in session.run(uninitialized):\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)\\n105 """\\n106 def wrapped(*args, **kwargs):\\n--> 107 return _add_should_use_warning(fn(*args, **kwargs))\\n108 return tf_decorator.make_decorator(\\n109 fn, wrapped, \\'should_use_result\\',\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)\\n182 A `Tensor` of type `bool`.\\n183 """\\n--> 184 if ref.dtype._is_ref_dtype:\\n185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)\\n186 # Handle resource variables.\\n\\nAttributeError: \\'Iterator\\' object has no attribute \\'dtype\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def _build(self):
tensor = self._build_parameter()
self._dataholder_tensor = tensor
self._is_initialized_tensor = tf.is_variable_initialized(tensor)
|
def _build(self):
self._dataholder_tensor = self._build_parameter() # pylint: disable=W0201
|
[{'piece_type': 'source code', 'piece_content': 'model = gpflow.models.svgp.SVGP(np.random.randn(1, 1),\\nnp.random.randn(1, 1),\\ngpflow.kernels.RBF(1),\\ngpflow.likelihoods.Gaussian(),\\nnp.random.randn(1, 1),\\nminibatch_size=1)\\nmodel.compute_log_likelihood()'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-3-e3c07a8fceb7> in <module>()\\n10 np.random.randn(M, D),\\n11 minibatch_size=2)\\n---> 12 model.compute_log_likelihood()\\n\\n/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)\\n152 _setup_storage(store, *af_args, **af_kwargs)\\n153 _build_method(method, obj, store)\\n--> 154 return _session_run(session, obj, store, *args, **kwargs)\\n155 return runnable\\n156 return autoflow_wrapper\\n\\n/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)\\n189 feed_dict.update(obj.feeds)\\n190 initialize = kwargs.pop(\\'initialize\\', False)\\n--> 191 obj.initialize(session=session, force=initialize)\\n192 return session.run(store[\\'result\\'], **kwargs)\\n193\\n\\n/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)\\n82 session=session,\\n83 force=force,\\n---> 84 feed_dict=self.initializable_feeds)\\n85\\n86 def clear(self):\\n\\n/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)\\n82 initializer = tf.variables_initializer(variables)\\n83 else:\\n---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)\\n85 def uninitialized_names():\\n86 for uv in session.run(uninitialized):\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)\\n105 """\\n106 def wrapped(*args, **kwargs):\\n--> 107 return _add_should_use_warning(fn(*args, **kwargs))\\n108 return tf_decorator.make_decorator(\\n109 fn, wrapped, \\'should_use_result\\',\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)\\n182 A `Tensor` of type `bool`.\\n183 """\\n--> 184 if ref.dtype._is_ref_dtype:\\n185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)\\n186 # Handle resource variables.\\n\\nAttributeError: \\'Iterator\\' object has no attribute \\'dtype\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def _init_parameter_defaults(self):
self._initial_value_tensor = None
self._dataholder_tensor = None
self._is_initialized_tensor = None
|
def _init_parameter_defaults(self):
self._initial_value_tensor = None
self._dataholder_tensor = None
|
[{'piece_type': 'source code', 'piece_content': 'model = gpflow.models.svgp.SVGP(np.random.randn(1, 1),\\nnp.random.randn(1, 1),\\ngpflow.kernels.RBF(1),\\ngpflow.likelihoods.Gaussian(),\\nnp.random.randn(1, 1),\\nminibatch_size=1)\\nmodel.compute_log_likelihood()'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-3-e3c07a8fceb7> in <module>()\\n10 np.random.randn(M, D),\\n11 minibatch_size=2)\\n---> 12 model.compute_log_likelihood()\\n\\n/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)\\n152 _setup_storage(store, *af_args, **af_kwargs)\\n153 _build_method(method, obj, store)\\n--> 154 return _session_run(session, obj, store, *args, **kwargs)\\n155 return runnable\\n156 return autoflow_wrapper\\n\\n/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)\\n189 feed_dict.update(obj.feeds)\\n190 initialize = kwargs.pop(\\'initialize\\', False)\\n--> 191 obj.initialize(session=session, force=initialize)\\n192 return session.run(store[\\'result\\'], **kwargs)\\n193\\n\\n/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)\\n82 session=session,\\n83 force=force,\\n---> 84 feed_dict=self.initializable_feeds)\\n85\\n86 def clear(self):\\n\\n/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)\\n82 initializer = tf.variables_initializer(variables)\\n83 else:\\n---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)\\n85 def uninitialized_names():\\n86 for uv in session.run(uninitialized):\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)\\n105 """\\n106 def wrapped(*args, **kwargs):\\n--> 107 return _add_should_use_warning(fn(*args, **kwargs))\\n108 return tf_decorator.make_decorator(\\n109 fn, wrapped, \\'should_use_result\\',\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)\\n182 A `Tensor` of type `bool`.\\n183 """\\n--> 184 if ref.dtype._is_ref_dtype:\\n185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)\\n186 # Handle resource variables.\\n\\nAttributeError: \\'Iterator\\' object has no attribute \\'dtype\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def initializables(self):
if self._externally_defined:
return None
return [(self.parameter_tensor, self.is_initialized_tensor)]
|
def initializables(self):
if self._externally_defined:
return None
return [self.parameter_tensor]
|
[{'piece_type': 'source code', 'piece_content': 'model = gpflow.models.svgp.SVGP(np.random.randn(1, 1),\\nnp.random.randn(1, 1),\\ngpflow.kernels.RBF(1),\\ngpflow.likelihoods.Gaussian(),\\nnp.random.randn(1, 1),\\nminibatch_size=1)\\nmodel.compute_log_likelihood()'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-3-e3c07a8fceb7> in <module>()\\n10 np.random.randn(M, D),\\n11 minibatch_size=2)\\n---> 12 model.compute_log_likelihood()\\n\\n/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)\\n152 _setup_storage(store, *af_args, **af_kwargs)\\n153 _build_method(method, obj, store)\\n--> 154 return _session_run(session, obj, store, *args, **kwargs)\\n155 return runnable\\n156 return autoflow_wrapper\\n\\n/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)\\n189 feed_dict.update(obj.feeds)\\n190 initialize = kwargs.pop(\\'initialize\\', False)\\n--> 191 obj.initialize(session=session, force=initialize)\\n192 return session.run(store[\\'result\\'], **kwargs)\\n193\\n\\n/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)\\n82 session=session,\\n83 force=force,\\n---> 84 feed_dict=self.initializable_feeds)\\n85\\n86 def clear(self):\\n\\n/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)\\n82 initializer = tf.variables_initializer(variables)\\n83 else:\\n---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)\\n85 def uninitialized_names():\\n86 for uv in session.run(uninitialized):\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)\\n105 """\\n106 def wrapped(*args, **kwargs):\\n--> 107 return _add_should_use_warning(fn(*args, **kwargs))\\n108 return tf_decorator.make_decorator(\\n109 fn, wrapped, \\'should_use_result\\',\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)\\n182 A `Tensor` of type `bool`.\\n183 """\\n--> 184 if ref.dtype._is_ref_dtype:\\n185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)\\n186 # Handle resource variables.\\n\\nAttributeError: \\'Iterator\\' object has no attribute \\'dtype\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def read_value(self, session=None):
if session is not None and not isinstance(session, tf.Session):
raise ValueError('TensorFlow session expected as an argument.')
if session is None and self._externally_defined:
raise GPflowError('Externally defined parameter requires session.')
elif session:
is_built = self.is_built_coherence(session.graph)
if is_built is Build.YES:
return self._read_parameter_tensor(session)
return self._value
|
def read_value(self, session=None):
if session is not None:
if not isinstance(session, tf.Session):
raise ValueError('TensorFlow session expected as session argument.')
is_built = self.is_built_coherence(session.graph)
if is_built is Build.YES:
return self._read_parameter_tensor(session)
elif self._externally_defined:
raise GPflowError('Externally defined parameter requires session.')
return self._value
|
[{'piece_type': 'source code', 'piece_content': 'model = gpflow.models.svgp.SVGP(np.random.randn(1, 1),\\nnp.random.randn(1, 1),\\ngpflow.kernels.RBF(1),\\ngpflow.likelihoods.Gaussian(),\\nnp.random.randn(1, 1),\\nminibatch_size=1)\\nmodel.compute_log_likelihood()'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-3-e3c07a8fceb7> in <module>()\\n10 np.random.randn(M, D),\\n11 minibatch_size=2)\\n---> 12 model.compute_log_likelihood()\\n\\n/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)\\n152 _setup_storage(store, *af_args, **af_kwargs)\\n153 _build_method(method, obj, store)\\n--> 154 return _session_run(session, obj, store, *args, **kwargs)\\n155 return runnable\\n156 return autoflow_wrapper\\n\\n/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)\\n189 feed_dict.update(obj.feeds)\\n190 initialize = kwargs.pop(\\'initialize\\', False)\\n--> 191 obj.initialize(session=session, force=initialize)\\n192 return session.run(store[\\'result\\'], **kwargs)\\n193\\n\\n/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)\\n82 session=session,\\n83 force=force,\\n---> 84 feed_dict=self.initializable_feeds)\\n85\\n86 def clear(self):\\n\\n/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)\\n82 initializer = tf.variables_initializer(variables)\\n83 else:\\n---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)\\n85 def uninitialized_names():\\n86 for uv in session.run(uninitialized):\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)\\n105 """\\n106 def wrapped(*args, **kwargs):\\n--> 107 return _add_should_use_warning(fn(*args, **kwargs))\\n108 return tf_decorator.make_decorator(\\n109 fn, wrapped, \\'should_use_result\\',\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)\\n182 A `Tensor` of type `bool`.\\n183 """\\n--> 184 if ref.dtype._is_ref_dtype:\\n185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)\\n186 # Handle resource variables.\\n\\nAttributeError: \\'Iterator\\' object has no attribute \\'dtype\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def _clear(self):
self._reset_name()
self._externally_defined = False
self._is_initialized_tensor = None
self._initial_value_tensor = None
self._unconstrained_tensor = None
self._constrained_tensor = None
self._prior_tensor = None
|
def _clear(self):
self._reset_name()
self._externally_defined = False # pylint: disable=W0201
self._initial_value_tensor = None # pylint: disable=W0201
self._unconstrained_tensor = None # pylint: disable=W0201
self._constrained_tensor = None # pylint: disable=W0201
self._prior_tensor = None # pylint: disable=W0201
|
[{'piece_type': 'source code', 'piece_content': 'model = gpflow.models.svgp.SVGP(np.random.randn(1, 1),\\nnp.random.randn(1, 1),\\ngpflow.kernels.RBF(1),\\ngpflow.likelihoods.Gaussian(),\\nnp.random.randn(1, 1),\\nminibatch_size=1)\\nmodel.compute_log_likelihood()'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-3-e3c07a8fceb7> in <module>()\\n10 np.random.randn(M, D),\\n11 minibatch_size=2)\\n---> 12 model.compute_log_likelihood()\\n\\n/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)\\n152 _setup_storage(store, *af_args, **af_kwargs)\\n153 _build_method(method, obj, store)\\n--> 154 return _session_run(session, obj, store, *args, **kwargs)\\n155 return runnable\\n156 return autoflow_wrapper\\n\\n/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)\\n189 feed_dict.update(obj.feeds)\\n190 initialize = kwargs.pop(\\'initialize\\', False)\\n--> 191 obj.initialize(session=session, force=initialize)\\n192 return session.run(store[\\'result\\'], **kwargs)\\n193\\n\\n/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)\\n82 session=session,\\n83 force=force,\\n---> 84 feed_dict=self.initializable_feeds)\\n85\\n86 def clear(self):\\n\\n/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)\\n82 initializer = tf.variables_initializer(variables)\\n83 else:\\n---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)\\n85 def uninitialized_names():\\n86 for uv in session.run(uninitialized):\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)\\n105 """\\n106 def wrapped(*args, **kwargs):\\n--> 107 return _add_should_use_warning(fn(*args, **kwargs))\\n108 return tf_decorator.make_decorator(\\n109 fn, wrapped, \\'should_use_result\\',\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)\\n182 A `Tensor` of type `bool`.\\n183 """\\n--> 184 if ref.dtype._is_ref_dtype:\\n185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)\\n186 # Handle resource variables.\\n\\nAttributeError: \\'Iterator\\' object has no attribute \\'dtype\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def _build(self):
unconstrained = self._build_parameter()
constrained = self._build_constrained(unconstrained)
prior = self._build_prior(unconstrained, constrained)
self._is_initialized_tensor = tf.is_variable_initialized(unconstrained)
self._unconstrained_tensor = unconstrained
self._constrained_tensor = constrained
self._prior_tensor = prior
|
def _build(self):
unconstrained = self._build_parameter()
constrained = self._build_constrained(unconstrained)
prior = self._build_prior(unconstrained, constrained)
self._unconstrained_tensor = unconstrained # pylint: disable=W0201
self._constrained_tensor = constrained # pylint: disable=W0201
self._prior_tensor = prior # pylint: disable=W0201
|
[{'piece_type': 'source code', 'piece_content': 'model = gpflow.models.svgp.SVGP(np.random.randn(1, 1),\\nnp.random.randn(1, 1),\\ngpflow.kernels.RBF(1),\\ngpflow.likelihoods.Gaussian(),\\nnp.random.randn(1, 1),\\nminibatch_size=1)\\nmodel.compute_log_likelihood()'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-3-e3c07a8fceb7> in <module>()\\n10 np.random.randn(M, D),\\n11 minibatch_size=2)\\n---> 12 model.compute_log_likelihood()\\n\\n/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)\\n152 _setup_storage(store, *af_args, **af_kwargs)\\n153 _build_method(method, obj, store)\\n--> 154 return _session_run(session, obj, store, *args, **kwargs)\\n155 return runnable\\n156 return autoflow_wrapper\\n\\n/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)\\n189 feed_dict.update(obj.feeds)\\n190 initialize = kwargs.pop(\\'initialize\\', False)\\n--> 191 obj.initialize(session=session, force=initialize)\\n192 return session.run(store[\\'result\\'], **kwargs)\\n193\\n\\n/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)\\n82 session=session,\\n83 force=force,\\n---> 84 feed_dict=self.initializable_feeds)\\n85\\n86 def clear(self):\\n\\n/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)\\n82 initializer = tf.variables_initializer(variables)\\n83 else:\\n---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)\\n85 def uninitialized_names():\\n86 for uv in session.run(uninitialized):\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)\\n105 """\\n106 def wrapped(*args, **kwargs):\\n--> 107 return _add_should_use_warning(fn(*args, **kwargs))\\n108 return tf_decorator.make_decorator(\\n109 fn, wrapped, \\'should_use_result\\',\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)\\n182 A `Tensor` of type `bool`.\\n183 """\\n--> 184 if ref.dtype._is_ref_dtype:\\n185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)\\n186 # Handle resource variables.\\n\\nAttributeError: \\'Iterator\\' object has no attribute \\'dtype\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def _build_parameter(self):
if self._externally_defined:
self._check_tensor_trainable(self.parameter_tensor)
return self.parameter_tensor
name = self._parameter_name()
tensor = misc.get_variable_by_name(name)
if tensor is not None:
raise GPflowError('Tensor with name "{name}" already exists, {tensor}.'
.format(name=name, tensor=tensor))
value = self._apply_transform(self._value)
shape = value.shape if self.fixed_shape else None
init = tf.placeholder(self.dtype, shape=shape, name='initial_unconstrained_value')
self._initial_value_tensor = init
if self.fixed_shape:
args = dict(trainable=self.trainable)
else:
args = dict(validate_shape=False, trainable=self.trainable)
variable = tf.get_variable(name, initializer=init, **args)
return variable
|
def _build_parameter(self):
if self._externally_defined:
self._check_tensor_trainable(self.parameter_tensor)
return self.parameter_tensor
name = self._parameter_name()
tensor = misc.get_variable_by_name(name)
if tensor is not None:
raise GPflowError('Tensor with name "{name}" already exists, {tensor}.'
.format(name=name, tensor=tensor))
value = self._apply_transform(self._value)
shape = value.shape if self.fixed_shape else None
init = tf.placeholder(self.dtype, shape=shape, name='initial_unconstrained_value')
self._initial_value_tensor = init
if self.fixed_shape:
return tf.get_variable(name, initializer=init, trainable=self.trainable)
return tf.get_variable(name, initializer=init,
validate_shape=False,
trainable=self.trainable)
|
[{'piece_type': 'source code', 'piece_content': 'model = gpflow.models.svgp.SVGP(np.random.randn(1, 1),\\nnp.random.randn(1, 1),\\ngpflow.kernels.RBF(1),\\ngpflow.likelihoods.Gaussian(),\\nnp.random.randn(1, 1),\\nminibatch_size=1)\\nmodel.compute_log_likelihood()'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-3-e3c07a8fceb7> in <module>()\\n10 np.random.randn(M, D),\\n11 minibatch_size=2)\\n---> 12 model.compute_log_likelihood()\\n\\n/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)\\n152 _setup_storage(store, *af_args, **af_kwargs)\\n153 _build_method(method, obj, store)\\n--> 154 return _session_run(session, obj, store, *args, **kwargs)\\n155 return runnable\\n156 return autoflow_wrapper\\n\\n/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)\\n189 feed_dict.update(obj.feeds)\\n190 initialize = kwargs.pop(\\'initialize\\', False)\\n--> 191 obj.initialize(session=session, force=initialize)\\n192 return session.run(store[\\'result\\'], **kwargs)\\n193\\n\\n/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)\\n82 session=session,\\n83 force=force,\\n---> 84 feed_dict=self.initializable_feeds)\\n85\\n86 def clear(self):\\n\\n/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)\\n82 initializer = tf.variables_initializer(variables)\\n83 else:\\n---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)\\n85 def uninitialized_names():\\n86 for uv in session.run(uninitialized):\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)\\n105 """\\n106 def wrapped(*args, **kwargs):\\n--> 107 return _add_should_use_warning(fn(*args, **kwargs))\\n108 return tf_decorator.make_decorator(\\n109 fn, wrapped, \\'should_use_result\\',\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)\\n182 A `Tensor` of type `bool`.\\n183 """\\n--> 184 if ref.dtype._is_ref_dtype:\\n185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)\\n186 # Handle resource variables.\\n\\nAttributeError: \\'Iterator\\' object has no attribute \\'dtype\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def _init_parameter_defaults(self):
self._is_initialized_tensor = None
self._initial_value_tensor = None
self._unconstrained_tensor = None
self._prior_tensor = None
self._constrained_tensor = None
|
def _init_parameter_defaults(self):
self._initial_value_tensor = None
self._unconstrained_tensor = None
self._prior_tensor = None
self._constrained_tensor = None
|
[{'piece_type': 'source code', 'piece_content': 'model = gpflow.models.svgp.SVGP(np.random.randn(1, 1),\\nnp.random.randn(1, 1),\\ngpflow.kernels.RBF(1),\\ngpflow.likelihoods.Gaussian(),\\nnp.random.randn(1, 1),\\nminibatch_size=1)\\nmodel.compute_log_likelihood()'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-3-e3c07a8fceb7> in <module>()\\n10 np.random.randn(M, D),\\n11 minibatch_size=2)\\n---> 12 model.compute_log_likelihood()\\n\\n/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)\\n152 _setup_storage(store, *af_args, **af_kwargs)\\n153 _build_method(method, obj, store)\\n--> 154 return _session_run(session, obj, store, *args, **kwargs)\\n155 return runnable\\n156 return autoflow_wrapper\\n\\n/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)\\n189 feed_dict.update(obj.feeds)\\n190 initialize = kwargs.pop(\\'initialize\\', False)\\n--> 191 obj.initialize(session=session, force=initialize)\\n192 return session.run(store[\\'result\\'], **kwargs)\\n193\\n\\n/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)\\n82 session=session,\\n83 force=force,\\n---> 84 feed_dict=self.initializable_feeds)\\n85\\n86 def clear(self):\\n\\n/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)\\n82 initializer = tf.variables_initializer(variables)\\n83 else:\\n---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)\\n85 def uninitialized_names():\\n86 for uv in session.run(uninitialized):\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)\\n105 """\\n106 def wrapped(*args, **kwargs):\\n--> 107 return _add_should_use_warning(fn(*args, **kwargs))\\n108 return tf_decorator.make_decorator(\\n109 fn, wrapped, \\'should_use_result\\',\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)\\n182 A `Tensor` of type `bool`.\\n183 """\\n--> 184 if ref.dtype._is_ref_dtype:\\n185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)\\n186 # Handle resource variables.\\n\\nAttributeError: \\'Iterator\\' object has no attribute \\'dtype\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def minimize(self, model, session=None, var_list=None, feed_dict=None,
maxiter=1000, initialize=False, anchor=True, **kwargs):
"""
Minimizes objective function of the model.
:param model: GPflow model with objective tensor.
:param session: Session where optimization will be run.
:param var_list: List of extra variables which should be trained during optimization.
:param feed_dict: Feed dictionary of tensors passed to session run method.
:param maxiter: Number of run interation.
:param initialize: If `True` model parameters will be re-initialized even if they were
initialized before for gotten session.
:param anchor: If `True` trained variable values computed during optimization at
particular session will be synchronized with internal parameter values.
:param kwargs: This is a dictionary of extra parameters for session run method.
"""
if model is None or not isinstance(model, Model):
raise ValueError('Unknown type passed for optimization.')
session = model.enquire_session(session)
self._model = model
objective = model.objective
with session.graph.as_default():
full_var_list = self._gen_var_list(model, var_list)
# Create optimizer variables before initialization.
self._minimize_operation = self.optimizer.minimize(
objective, var_list=full_var_list, **kwargs)
model.initialize(session=session, force=initialize)
self._initialize_optimizer(session, full_var_list)
feed_dict = self._gen_feed_dict(model, feed_dict)
for _i in range(maxiter):
session.run(self.minimize_operation, feed_dict=feed_dict)
if anchor:
model.anchor(session)
|
def minimize(self, model, session=None, var_list=None, feed_dict=None,
maxiter=1000, initialize=True, anchor=True, **kwargs):
"""
Minimizes objective function of the model.
:param model: GPflow model with objective tensor.
:param session: Session where optimization will be run.
:param var_list: List of extra variables which should be trained during optimization.
:param feed_dict: Feed dictionary of tensors passed to session run method.
:param maxiter: Number of run interation.
:param initialize: If `True` model parameters will be re-initialized even if they were
initialized before for gotten session.
:param anchor: If `True` trained variable values computed during optimization at
particular session will be synchronized with internal parameter values.
:param kwargs: This is a dictionary of extra parameters for session run method.
"""
if model is None or not isinstance(model, Model):
raise ValueError('Unknown type passed for optimization.')
session = model.enquire_session(session)
self._model = model
objective = model.objective
with session.graph.as_default():
full_var_list = self._gen_var_list(model, var_list)
# Create optimizer variables before initialization.
self._minimize_operation = self.optimizer.minimize(
objective, var_list=full_var_list, **kwargs)
model.initialize(session=session, force=initialize)
self._initialize_optimizer(session, full_var_list)
feed_dict = self._gen_feed_dict(model, feed_dict)
for _i in range(maxiter):
session.run(self.minimize_operation, feed_dict=feed_dict)
if anchor:
model.anchor(session)
|
[{'piece_type': 'source code', 'piece_content': 'model = gpflow.models.svgp.SVGP(np.random.randn(1, 1),\\nnp.random.randn(1, 1),\\ngpflow.kernels.RBF(1),\\ngpflow.likelihoods.Gaussian(),\\nnp.random.randn(1, 1),\\nminibatch_size=1)\\nmodel.compute_log_likelihood()'}, {'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nAttributeError Traceback (most recent call last)\\n<ipython-input-3-e3c07a8fceb7> in <module>()\\n10 np.random.randn(M, D),\\n11 minibatch_size=2)\\n---> 12 model.compute_log_likelihood()\\n\\n/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)\\n152 _setup_storage(store, *af_args, **af_kwargs)\\n153 _build_method(method, obj, store)\\n--> 154 return _session_run(session, obj, store, *args, **kwargs)\\n155 return runnable\\n156 return autoflow_wrapper\\n\\n/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)\\n189 feed_dict.update(obj.feeds)\\n190 initialize = kwargs.pop(\\'initialize\\', False)\\n--> 191 obj.initialize(session=session, force=initialize)\\n192 return session.run(store[\\'result\\'], **kwargs)\\n193\\n\\n/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)\\n82 session=session,\\n83 force=force,\\n---> 84 feed_dict=self.initializable_feeds)\\n85\\n86 def clear(self):\\n\\n/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)\\n82 initializer = tf.variables_initializer(variables)\\n83 else:\\n---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)\\n85 def uninitialized_names():\\n86 for uv in session.run(uninitialized):\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)\\n105 """\\n106 def wrapped(*args, **kwargs):\\n--> 107 return _add_should_use_warning(fn(*args, **kwargs))\\n108 return tf_decorator.make_decorator(\\n109 fn, wrapped, \\'should_use_result\\',\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)\\n1519 variables_mask = math_ops.logical_not(\\n1520 array_ops.stack(\\n-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))\\n1522 # Get a 1-D string tensor containing all the variable names.\\n1523 variable_names_tensor = array_ops.constant(\\n\\n/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)\\n182 A `Tensor` of type `bool`.\\n183 """\\n--> 184 if ref.dtype._is_ref_dtype:\\n185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)\\n186 # Handle resource variables.\\n\\nAttributeError: \\'Iterator\\' object has no attribute \\'dtype\\''}]
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def prob_is_largest(self, Y, mu, var, gh_x, gh_w):
# work out what the mean and variance is of the indicated latent function.
oh_on = tf.cast(tf.one_hot(tf.reshape(Y, (-1,)), self.num_classes, 1., 0.), tf.float64)
mu_selected = tf.reduce_sum(oh_on * mu, 1)
var_selected = tf.reduce_sum(oh_on * var, 1)
# generate Gauss Hermite grid
X = tf.reshape(mu_selected, (-1, 1)) + gh_x * tf.reshape(tf.sqrt(tf.clip_by_value(2. * var_selected, 1e-10, np.inf)), (-1, 1))
# compute the CDF of the Gaussian between the latent functions and the grid (including the selected function)
dist = (tf.expand_dims(X, 1) - tf.expand_dims(mu, 2)) / tf.expand_dims(tf.sqrt(tf.clip_by_value(var, 1e-10, np.inf)), 2)
cdfs = 0.5 * (1.0 + tf.erf(dist/np.sqrt(2.0)))
cdfs = cdfs * (1-2e-4) + 1e-4
# blank out all the distances on the selected latent function
oh_off = tf.cast(tf.one_hot(tf.reshape(Y, (-1,)), self.num_classes, 0., 1.), tf.float64)
cdfs = cdfs * tf.expand_dims(oh_off, 2) + tf.expand_dims(oh_on, 2)
# take the product over the latent functions, and the sum over the GH grid.
return tf.matmul(tf.reduce_prod(cdfs, reduction_indices=[1]), tf.reshape(gh_w/np.sqrt(np.pi), (-1, 1)))
|
def prob_is_largest(self, Y, mu, var, gh_x, gh_w):
# work out what the mean and variance is of the indicated latent function.
oh_on = tf.cast(tf.one_hot(tf.reshape(Y, (-1,)), self.num_classes, 1., 0.), tf.float64)
mu_selected = tf.reduce_sum(oh_on * mu, 1)
var_selected = tf.reduce_sum(oh_on * var, 1)
# generate Gauss Hermite grid
X = tf.reshape(mu_selected, (-1, 1)) + gh_x * tf.reshape(tf.sqrt(tf.clip_by_value(2. * var_selected, 1e-10, np.inf)), (-1, 1))
# compute the CDF of the Gaussian between the latent functions and the grid (including the selected function)
dist = (tf.expand_dims(X, 1) - tf.expand_dims(mu, 2)) / tf.expand_dims(tf.sqrt(tf.clip_by_value(var, 1e-10, np.inf)), 2)
cdfs = 0.5 * (1.0 + tf.erf(dist/np.sqrt(2.0)))
cdfs = cdfs * (1-2e-4) + 1e-4
# blank out all the distances on the selected latent function
oh_off = tf.cast(tf.one_hot(tf.reshape(Y, (-1,)), self.num_classes, 0., 1.), tf.float64)
cdfs = cdfs * tf.expand_dims(oh_off, 2) + tf.expand_dims(oh_on, 2)
# take the product over the latent functions, and the sum over the GH grid.
return tf.matmul(tf.reduce_prod(cdfs, 1), tf.reshape(gh_w/np.sqrt(np.pi), (-1, 1)))
|
[{'piece_type': 'source code', 'piece_content': 'm.kern.white.variance.fixed = True\\nm.Z.fixed = True\\n_ = m.optimize()'}, {'piece_type': 'error message', 'piece_content': 'python\\n---------------------------------------------------------------------------\\nValueError Traceback (most recent call last)\\n<ipython-input-5-e4eebd086840> in <module>()\\n1 m.kern.white.variance.fixed = True\\n2 m.Z.fixed = True\\n----> 3 _ = m.optimize()\\n\\n/Users/danmarthaler/GPflow/GPflow/model.pyc in optimize(self, method, tol, callback, maxiter, **kw)\\n207\\n208 if type(method) is str:\\n--> 209 return self._optimize_np(method, tol, callback, maxiter, **kw)\\n210 else:\\n211 return self._optimize_tf(method, callback, maxiter, **kw)\\n\\n/Users/danmarthaler/GPflow/GPflow/model.pyc in _optimize_np(self, method, tol, callback, maxiter, **kw)\\n265 """\\n266 if self._needs_recompile:\\n--> 267 self._compile()\\n268\\n269 options = dict(disp=True, maxiter=maxiter)\\n\\n/Users/danmarthaler/GPflow/GPflow/model.pyc in _compile(self, optimizer)\\n127 with self.tf_mode():\\n128 f = self.build_likelihood() + self.build_prior()\\n--> 129 g, = tf.gradients(f, self._free_vars)\\n130\\n131 self._minusF = tf.neg(f, name=\\'objective\\')\\n\\n/usr/local/lib/python2.7/site-packages/tensorflow/python/ops/gradients.pyc in gradients(ys, xs, grad_ys, name, colocate_gradients_with_ops, gate_gradients, aggregation_method)\\n476 # If grad_fn was found, do not use SymbolicGradient even for\\n477 # functions.\\n--> 478 in_grads = _AsList(grad_fn(op, *out_grads))\\n479 else:\\n480 # For function call ops, we add a \\'SymbolicGradient\\'\\n\\n/usr/local/lib/python2.7/site-packages/tensorflow/python/ops/math_grad.pyc in _ProdGrad(op, grad)\\n128 reduced = math_ops.cast(op.inputs[1], dtypes.int32)\\n129 idx = math_ops.range(0, array_ops.rank(op.inputs[0]))\\n--> 130 other, _ = array_ops.listdiff(idx, reduced)\\n131 perm = array_ops.concat(0, [reduced, other])\\n132 reduced_num = math_ops.reduce_prod(array_ops.gather(input_shape, reduced))\\n\\n/usr/local/lib/python2.7/site-packages/tensorflow/python/ops/gen_array_ops.pyc in list_diff(x, y, name)\\n1199 idx: A `Tensor` of type `int32`. 1-D. Positions of `x` values preserved in `out`.\\n1200 """\\n-> 1201 result = _op_def_lib.apply_op("ListDiff", x=x, y=y, name=name)\\n1202 return _ListDiffOutput._make(result)\\n1203\\n\\n/usr/local/lib/python2.7/site-packages/tensorflow/python/framework/op_def_library.pyc in apply_op(self, op_type_name, name, **keywords)\\n701 op = g.create_op(op_type_name, inputs, output_types, name=scope,\\n702 input_types=input_types, attrs=attr_protos,\\n--> 703 op_def=op_def)\\n704 outputs = op.outputs\\n705 return _Restructure(ops.convert_n_to_tensor(outputs),\\n\\n/usr/local/lib/python2.7/site-packages/tensorflow/python/framework/ops.pyc in create_op(self, op_type, inputs, dtypes, input_types, name, attrs, op_def, compute_shapes, compute_device)\\n2310 original_op=self._default_original_op, op_def=op_def)\\n2311 if compute_shapes:\\n-> 2312 set_shapes_for_outputs(ret)\\n2313 self._add_op(ret)\\n2314 self._record_op_seen_by_control_dependencies(ret)\\n\\n/usr/local/lib/python2.7/site-packages/tensorflow/python/framework/ops.pyc in set_shapes_for_outputs(op)\\n1702 raise RuntimeError("No shape function registered for standard op: %s"\\n1703 % op.type)\\n-> 1704 shapes = shape_func(op)\\n1705 if shapes is None:\\n1706 raise RuntimeError(\\n\\n/usr/local/lib/python2.7/site-packages/tensorflow/python/ops/array_ops.pyc in _ListDiffShape(op)\\n1979 """Shape function for the ListDiff op."""\\n1980 op.inputs[0].get_shape().assert_has_rank(1)\\n-> 1981 op.inputs[1].get_shape().assert_has_rank(1)\\n1982 # TODO(mrry): Indicate that the length falls within an interval?\\n1983 return [tensor_shape.vector(None)] * 2\\n\\n/usr/local/lib/python2.7/site-packages/tensorflow/python/framework/tensor_shape.pyc in assert_has_rank(self, rank)\\n619 """\\n620 if self.ndims not in (None, rank):\\n--> 621 raise ValueError("Shape %s must have rank %d" % (self, rank))\\n622\\n623 def with_rank(self, rank):\\n\\nValueError: Shape () must have rank 1'}]
|
python
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-5-e4eebd086840> in <module>()
1 m.kern.white.variance.fixed = True
2 m.Z.fixed = True
----> 3 _ = m.optimize()
/Users/danmarthaler/GPflow/GPflow/model.pyc in optimize(self, method, tol, callback, maxiter, **kw)
207
208 if type(method) is str:
--> 209 return self._optimize_np(method, tol, callback, maxiter, **kw)
210 else:
211 return self._optimize_tf(method, callback, maxiter, **kw)
/Users/danmarthaler/GPflow/GPflow/model.pyc in _optimize_np(self, method, tol, callback, maxiter, **kw)
265 """
266 if self._needs_recompile:
--> 267 self._compile()
268
269 options = dict(disp=True, maxiter=maxiter)
/Users/danmarthaler/GPflow/GPflow/model.pyc in _compile(self, optimizer)
127 with self.tf_mode():
128 f = self.build_likelihood() + self.build_prior()
--> 129 g, = tf.gradients(f, self._free_vars)
130
131 self._minusF = tf.neg(f, name='objective')
/usr/local/lib/python2.7/site-packages/tensorflow/python/ops/gradients.pyc in gradients(ys, xs, grad_ys, name, colocate_gradients_with_ops, gate_gradients, aggregation_method)
476 # If grad_fn was found, do not use SymbolicGradient even for
477 # functions.
--> 478 in_grads = _AsList(grad_fn(op, *out_grads))
479 else:
480 # For function call ops, we add a 'SymbolicGradient'
/usr/local/lib/python2.7/site-packages/tensorflow/python/ops/math_grad.pyc in _ProdGrad(op, grad)
128 reduced = math_ops.cast(op.inputs[1], dtypes.int32)
129 idx = math_ops.range(0, array_ops.rank(op.inputs[0]))
--> 130 other, _ = array_ops.listdiff(idx, reduced)
131 perm = array_ops.concat(0, [reduced, other])
132 reduced_num = math_ops.reduce_prod(array_ops.gather(input_shape, reduced))
/usr/local/lib/python2.7/site-packages/tensorflow/python/ops/gen_array_ops.pyc in list_diff(x, y, name)
1199 idx: A `Tensor` of type `int32`. 1-D. Positions of `x` values preserved in `out`.
1200 """
-> 1201 result = _op_def_lib.apply_op("ListDiff", x=x, y=y, name=name)
1202 return _ListDiffOutput._make(result)
1203
/usr/local/lib/python2.7/site-packages/tensorflow/python/framework/op_def_library.pyc in apply_op(self, op_type_name, name, **keywords)
701 op = g.create_op(op_type_name, inputs, output_types, name=scope,
702 input_types=input_types, attrs=attr_protos,
--> 703 op_def=op_def)
704 outputs = op.outputs
705 return _Restructure(ops.convert_n_to_tensor(outputs),
/usr/local/lib/python2.7/site-packages/tensorflow/python/framework/ops.pyc in create_op(self, op_type, inputs, dtypes, input_types, name, attrs, op_def, compute_shapes, compute_device)
2310 original_op=self._default_original_op, op_def=op_def)
2311 if compute_shapes:
-> 2312 set_shapes_for_outputs(ret)
2313 self._add_op(ret)
2314 self._record_op_seen_by_control_dependencies(ret)
/usr/local/lib/python2.7/site-packages/tensorflow/python/framework/ops.pyc in set_shapes_for_outputs(op)
1702 raise RuntimeError("No shape function registered for standard op: %s"
1703 % op.type)
-> 1704 shapes = shape_func(op)
1705 if shapes is None:
1706 raise RuntimeError(
/usr/local/lib/python2.7/site-packages/tensorflow/python/ops/array_ops.pyc in _ListDiffShape(op)
1979 """Shape function for the ListDiff op."""
1980 op.inputs[0].get_shape().assert_has_rank(1)
-> 1981 op.inputs[1].get_shape().assert_has_rank(1)
1982 # TODO(mrry): Indicate that the length falls within an interval?
1983 return [tensor_shape.vector(None)] * 2
/usr/local/lib/python2.7/site-packages/tensorflow/python/framework/tensor_shape.pyc in assert_has_rank(self, rank)
619 """
620 if self.ndims not in (None, rank):
--> 621 raise ValueError("Shape %s must have rank %d" % (self, rank))
622
623 def with_rank(self, rank):
ValueError: Shape () must have rank 1
|
ValueError
|
def __call__(self, tf_method):
@wraps(tf_method)
def runnable(instance, *np_args):
graph_name = '_' + tf_method.__name__ + '_graph'
if not hasattr(instance, graph_name):
if instance._needs_recompile:
instance._compile() # ensures free_vars is up-to-date.
self.tf_args = [tf.placeholder(*a) for a in self.tf_arg_tuples]
with instance.tf_mode():
graph = tf_method(instance, *self.tf_args)
setattr(instance, graph_name, graph)
feed_dict = dict(zip(self.tf_args, np_args))
feed_dict[instance._free_vars] = instance.get_free_state()
graph = getattr(instance, graph_name)
return instance._session.run(graph, feed_dict=feed_dict)
return runnable
|
def __call__(self, tf_method):
@wraps(tf_method)
def runnable(instance, *np_args):
graph_name = '_' + tf_method.__name__ + '_graph'
if not hasattr(instance, graph_name):
instance._compile()
self.tf_args = [tf.placeholder(*a) for a in self.tf_arg_tuples]
with instance.tf_mode():
graph = tf_method(instance, *self.tf_args)
setattr(instance, graph_name, graph)
feed_dict = dict(zip(self.tf_args, np_args))
feed_dict[instance._free_vars] = instance.get_free_state()
graph = getattr(instance, graph_name)
return instance._session.run(graph, feed_dict=feed_dict)
return runnable
|
[{'piece_type': 'other', 'piece_content': 'import GPflow\\nimport tensorflow as tf\\nimport os\\nimport numpy as np\\ndef getData():\\nrng = np.random.RandomState( 1 )\\nN = 30\\nX = rng.rand(N,1)\\nY = np.sin(12*X) + 0.66*np.cos(25*X) + rng.randn(N,1)*0.1 + 3\\nreturn X,Y\\nif __name__ == \\'__main__\\':\\nX,Y = getData()\\nk = GPflow.kernels.Matern52(1)\\nmeanf = GPflow.mean_functions.Linear(1,0)\\nm = GPflow.gpr.GPR(X, Y, k, meanf)\\nm.likelihood.variance = 0.01\\nm._compile()\\nprint "Here are the parameters before optimization"\\nprint m\\nm.kern.variance.fixed = True\\n#m._compile() # If we compile again the code below works\\n[mu,var] = m.predict_f(X)\\nprint mu\\nprint \\'done\\''}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "/Users/mqbssaby/PrivateProjects/BranchedGP/runfile.py", line 29, in <module>\\n[mu,var] = m.predict_f(X)\\nFile "/Users/mqbssaby/pythonlibs/GPflow/GPflow/model.py", line 82, in runnable\\nreturn instance._session.run(graph, feed_dict=feed_dict)\\nFile "/Users/mqbssaby/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.py", line 340, in run\\nrun_metadata_ptr)\\nFile "/Users/mqbssaby/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.py", line 553, in _run\\n% (np_val.shape, subfeed_t.name, str(subfeed_t.get_shape())))\\nValueError: Cannot feed value of shape (4,) for Tensor u\\'Variable:0\\', which has shape \\'(5,)\\''}]
|
Traceback (most recent call last):
File "/Users/mqbssaby/PrivateProjects/BranchedGP/runfile.py", line 29, in <module>
[mu,var] = m.predict_f(X)
File "/Users/mqbssaby/pythonlibs/GPflow/GPflow/model.py", line 82, in runnable
return instance._session.run(graph, feed_dict=feed_dict)
File "/Users/mqbssaby/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.py", line 340, in run
run_metadata_ptr)
File "/Users/mqbssaby/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.py", line 553, in _run
% (np_val.shape, subfeed_t.name, str(subfeed_t.get_shape())))
ValueError: Cannot feed value of shape (4,) for Tensor u'Variable:0', which has shape '(5,)'
|
ValueError
|
def runnable(instance, *np_args):
graph_name = '_' + tf_method.__name__ + '_graph'
if not hasattr(instance, graph_name):
if instance._needs_recompile:
instance._compile() # ensures free_vars is up-to-date.
self.tf_args = [tf.placeholder(*a) for a in self.tf_arg_tuples]
with instance.tf_mode():
graph = tf_method(instance, *self.tf_args)
setattr(instance, graph_name, graph)
feed_dict = dict(zip(self.tf_args, np_args))
feed_dict[instance._free_vars] = instance.get_free_state()
graph = getattr(instance, graph_name)
return instance._session.run(graph, feed_dict=feed_dict)
|
def runnable(instance, *np_args):
graph_name = '_' + tf_method.__name__ + '_graph'
if not hasattr(instance, graph_name):
instance._compile()
self.tf_args = [tf.placeholder(*a) for a in self.tf_arg_tuples]
with instance.tf_mode():
graph = tf_method(instance, *self.tf_args)
setattr(instance, graph_name, graph)
feed_dict = dict(zip(self.tf_args, np_args))
feed_dict[instance._free_vars] = instance.get_free_state()
graph = getattr(instance, graph_name)
return instance._session.run(graph, feed_dict=feed_dict)
|
[{'piece_type': 'other', 'piece_content': 'import GPflow\\nimport tensorflow as tf\\nimport os\\nimport numpy as np\\ndef getData():\\nrng = np.random.RandomState( 1 )\\nN = 30\\nX = rng.rand(N,1)\\nY = np.sin(12*X) + 0.66*np.cos(25*X) + rng.randn(N,1)*0.1 + 3\\nreturn X,Y\\nif __name__ == \\'__main__\\':\\nX,Y = getData()\\nk = GPflow.kernels.Matern52(1)\\nmeanf = GPflow.mean_functions.Linear(1,0)\\nm = GPflow.gpr.GPR(X, Y, k, meanf)\\nm.likelihood.variance = 0.01\\nm._compile()\\nprint "Here are the parameters before optimization"\\nprint m\\nm.kern.variance.fixed = True\\n#m._compile() # If we compile again the code below works\\n[mu,var] = m.predict_f(X)\\nprint mu\\nprint \\'done\\''}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "/Users/mqbssaby/PrivateProjects/BranchedGP/runfile.py", line 29, in <module>\\n[mu,var] = m.predict_f(X)\\nFile "/Users/mqbssaby/pythonlibs/GPflow/GPflow/model.py", line 82, in runnable\\nreturn instance._session.run(graph, feed_dict=feed_dict)\\nFile "/Users/mqbssaby/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.py", line 340, in run\\nrun_metadata_ptr)\\nFile "/Users/mqbssaby/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.py", line 553, in _run\\n% (np_val.shape, subfeed_t.name, str(subfeed_t.get_shape())))\\nValueError: Cannot feed value of shape (4,) for Tensor u\\'Variable:0\\', which has shape \\'(5,)\\''}]
|
Traceback (most recent call last):
File "/Users/mqbssaby/PrivateProjects/BranchedGP/runfile.py", line 29, in <module>
[mu,var] = m.predict_f(X)
File "/Users/mqbssaby/pythonlibs/GPflow/GPflow/model.py", line 82, in runnable
return instance._session.run(graph, feed_dict=feed_dict)
File "/Users/mqbssaby/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.py", line 340, in run
run_metadata_ptr)
File "/Users/mqbssaby/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.py", line 553, in _run
% (np_val.shape, subfeed_t.name, str(subfeed_t.get_shape())))
ValueError: Cannot feed value of shape (4,) for Tensor u'Variable:0', which has shape '(5,)'
|
ValueError
|
def browse(self, uri):
logger.debug("Browsing files at: %s", uri)
result = []
local_path = path.uri_to_path(uri)
if str(local_path) == "root":
return list(self._get_media_dirs_refs())
if not self._is_in_basedir(local_path):
logger.warning(
"Rejected attempt to browse path (%s) outside dirs defined "
"in file/media_dirs config.",
uri,
)
return []
if path.uri_to_path(uri).is_file():
logger.error("Rejected attempt to browse file (%s)", uri)
return []
for dir_entry in local_path.iterdir():
child_path = dir_entry.resolve()
uri = path.path_to_uri(child_path)
if not self._show_dotfiles and dir_entry.name.startswith("."):
continue
if (
self._excluded_file_extensions
and dir_entry.suffix in self._excluded_file_extensions
):
continue
if child_path.is_symlink() and not self._follow_symlinks:
logger.debug("Ignoring symlink: %s", uri)
continue
if not self._is_in_basedir(child_path):
logger.debug("Ignoring symlink to outside base dir: %s", uri)
continue
if child_path.is_dir():
result.append(
models.Ref.directory(name=dir_entry.name, uri=uri)
)
elif child_path.is_file():
result.append(models.Ref.track(name=dir_entry.name, uri=uri))
def order(item):
return (item.type != models.Ref.DIRECTORY, item.name)
result.sort(key=order)
return result
|
def browse(self, uri):
logger.debug("Browsing files at: %s", uri)
result = []
local_path = path.uri_to_path(uri)
if str(local_path) == "root":
return list(self._get_media_dirs_refs())
if not self._is_in_basedir(local_path):
logger.warning(
"Rejected attempt to browse path (%s) outside dirs defined "
"in file/media_dirs config.",
uri,
)
return []
for dir_entry in local_path.iterdir():
child_path = dir_entry.resolve()
uri = path.path_to_uri(child_path)
if not self._show_dotfiles and dir_entry.name.startswith("."):
continue
if (
self._excluded_file_extensions
and dir_entry.suffix in self._excluded_file_extensions
):
continue
if child_path.is_symlink() and not self._follow_symlinks:
logger.debug("Ignoring symlink: %s", uri)
continue
if not self._is_in_basedir(child_path):
logger.debug("Ignoring symlink to outside base dir: %s", uri)
continue
if child_path.is_dir():
result.append(
models.Ref.directory(name=dir_entry.name, uri=uri)
)
elif child_path.is_file():
result.append(models.Ref.track(name=dir_entry.name, uri=uri))
def order(item):
return (item.type != models.Ref.DIRECTORY, item.name)
result.sort(key=order)
return result
|
[{'piece_type': 'error message', 'piece_content': 'FileBackend-2 DEBUG 2020-05-07 14:06:45,889 Browsing files at: file:///home/nick/Music/Tall%20Ships%20-%20Chemistry.mp3\\nCore-7 ERROR 2020-05-07 14:06:45,889 FileBackend backend caused an exception.\\nTraceback (most recent call last):\\nFile "/home/nick/Dev/mopidy-dev/mopidy/mopidy/core/library.py", line 17, in _backend_error_handling\\nyield\\nFile "/home/nick/Dev/mopidy-dev/mopidy/mopidy/core/library.py", line 114, in _browse\\nresult = backend.library.browse(uri).get()\\nFile "/usr/lib/python3/dist-packages/pykka/_threading.py", line 45, in get\\n_compat.reraise(*self._data[\\'exc_info\\'])\\nFile "/usr/lib/python3/dist-packages/pykka/_compat/__init__.py", line 29, in reraise\\nraise value\\nFile "/usr/lib/python3/dist-packages/pykka/_actor.py", line 193, in _actor_loop\\nresponse = self._handle_receive(envelope.message)\\nFile "/usr/lib/python3/dist-packages/pykka/_actor.py", line 299, in _handle_receive\\nreturn callee(*message.args, **message.kwargs)\\nFile "/home/nick/Dev/mopidy-dev/mopidy/mopidy/file/library.py", line 55, in browse\\nfor dir_entry in local_path.iterdir():\\nFile "/usr/lib/python3.8/pathlib.py", line 1113, in iterdir\\nfor name in self._accessor.listdir(self):\\nNotADirectoryError: [Errno 20] Not a directory: \\'/home/nick/Music/Tall Ships - Chemistry.mp3\\''}]
|
FileBackend-2 DEBUG 2020-05-07 14:06:45,889 Browsing files at: file:///home/nick/Music/Tall%20Ships%20-%20Chemistry.mp3
Core-7 ERROR 2020-05-07 14:06:45,889 FileBackend backend caused an exception.
Traceback (most recent call last):
File "/home/nick/Dev/mopidy-dev/mopidy/mopidy/core/library.py", line 17, in _backend_error_handling
yield
File "/home/nick/Dev/mopidy-dev/mopidy/mopidy/core/library.py", line 114, in _browse
result = backend.library.browse(uri).get()
File "/usr/lib/python3/dist-packages/pykka/_threading.py", line 45, in get
_compat.reraise(*self._data['exc_info'])
File "/usr/lib/python3/dist-packages/pykka/_compat/__init__.py", line 29, in reraise
raise value
File "/usr/lib/python3/dist-packages/pykka/_actor.py", line 193, in _actor_loop
response = self._handle_receive(envelope.message)
File "/usr/lib/python3/dist-packages/pykka/_actor.py", line 299, in _handle_receive
return callee(*message.args, **message.kwargs)
File "/home/nick/Dev/mopidy-dev/mopidy/mopidy/file/library.py", line 55, in browse
for dir_entry in local_path.iterdir():
File "/usr/lib/python3.8/pathlib.py", line 1113, in iterdir
for name in self._accessor.listdir(self):
NotADirectoryError: [Errno 20] Not a directory: '/home/nick/Music/Tall Ships - Chemistry.mp3'
|
NotADirectoryError
|
def on_error(self, error, debug):
gst_logger.error(f"GStreamer error: {error.message}")
gst_logger.debug(
f"Got ERROR bus message: error={error!r} debug={debug!r}"
)
# TODO: is this needed?
self._audio.stop_playback()
|
def on_error(self, error, debug):
error_msg = str(error).decode()
debug_msg = debug.decode()
gst_logger.debug(
"Got ERROR bus message: error=%r debug=%r", error_msg, debug_msg
)
gst_logger.error("GStreamer error: %s", error_msg)
# TODO: is this needed?
self._audio.stop_playback()
|
[{'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 219, in on_message\\nself.on_error(error, debug)\\nFile "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 328, in on_error\\nerror_msg = str(error).decode()\\nAttributeError: \\'str\\' object has no attribute \\'decode\\''}]
|
Traceback (most recent call last):
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 219, in on_message
self.on_error(error, debug)
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 328, in on_error
error_msg = str(error).decode()
AttributeError: 'str' object has no attribute 'decode'
|
AttributeError
|
def on_warning(self, error, debug):
gst_logger.warning(f"GStreamer warning: {error.message}")
gst_logger.debug(
f"Got WARNING bus message: error={error!r} debug={debug!r}"
)
|
def on_warning(self, error, debug):
error_msg = str(error).decode()
debug_msg = debug.decode()
gst_logger.warning("GStreamer warning: %s", error_msg)
gst_logger.debug(
"Got WARNING bus message: error=%r debug=%r", error_msg, debug_msg
)
|
[{'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 219, in on_message\\nself.on_error(error, debug)\\nFile "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 328, in on_error\\nerror_msg = str(error).decode()\\nAttributeError: \\'str\\' object has no attribute \\'decode\\''}]
|
Traceback (most recent call last):
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 219, in on_message
self.on_error(error, debug)
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 328, in on_error
error_msg = str(error).decode()
AttributeError: 'str' object has no attribute 'decode'
|
AttributeError
|
def _unwrap_stream(uri, timeout, scanner, requests_session):
"""
Get a stream URI from a playlist URI, ``uri``.
Unwraps nested playlists until something that's not a playlist is found or
the ``timeout`` is reached.
"""
original_uri = uri
seen_uris = set()
deadline = time.time() + timeout
while time.time() < deadline:
if uri in seen_uris:
logger.info(
'Unwrapping stream from URI (%s) failed: '
'playlist referenced itself', uri)
return None, None
else:
seen_uris.add(uri)
logger.debug('Unwrapping stream from URI: %s', uri)
try:
scan_timeout = deadline - time.time()
if scan_timeout < 0:
logger.info(
'Unwrapping stream from URI (%s) failed: '
'timed out in %sms', uri, timeout)
return None, None
scan_result = scanner.scan(uri, timeout=scan_timeout)
except exceptions.ScannerError as exc:
logger.debug('GStreamer failed scanning URI (%s): %s', uri, exc)
scan_result = None
if scan_result is not None:
has_interesting_mime = (
scan_result.mime is not None and
not scan_result.mime.startswith('text/') and
not scan_result.mime.startswith('application/')
)
if scan_result.playable or has_interesting_mime:
logger.debug(
'Unwrapped potential %s stream: %s', scan_result.mime, uri)
return uri, scan_result
download_timeout = deadline - time.time()
if download_timeout < 0:
logger.info(
'Unwrapping stream from URI (%s) failed: timed out in %sms',
uri, timeout)
return None, None
content = http.download(
requests_session, uri, timeout=download_timeout / 1000)
if content is None:
logger.info(
'Unwrapping stream from URI (%s) failed: '
'error downloading URI %s', original_uri, uri)
return None, None
uris = playlists.parse(content)
if not uris:
logger.debug(
'Failed parsing URI (%s) as playlist; found potential stream.',
uri)
return uri, None
# TODO Test streams and return first that seems to be playable
logger.debug(
'Parsed playlist (%s) and found new URI: %s', uri, uris[0])
uri = urllib.parse.urljoin(uri, uris[0])
|
def _unwrap_stream(uri, timeout, scanner, requests_session):
"""
Get a stream URI from a playlist URI, ``uri``.
Unwraps nested playlists until something that's not a playlist is found or
the ``timeout`` is reached.
"""
original_uri = uri
seen_uris = set()
deadline = time.time() + timeout
while time.time() < deadline:
if uri in seen_uris:
logger.info(
'Unwrapping stream from URI (%s) failed: '
'playlist referenced itself', uri)
return None, None
else:
seen_uris.add(uri)
logger.debug('Unwrapping stream from URI: %s', uri)
try:
scan_timeout = deadline - time.time()
if scan_timeout < 0:
logger.info(
'Unwrapping stream from URI (%s) failed: '
'timed out in %sms', uri, timeout)
return None, None
scan_result = scanner.scan(uri, timeout=scan_timeout)
except exceptions.ScannerError as exc:
logger.debug('GStreamer failed scanning URI (%s): %s', uri, exc)
scan_result = None
if scan_result is not None:
if scan_result.playable or (
not scan_result.mime.startswith('text/') and
not scan_result.mime.startswith('application/')
):
logger.debug(
'Unwrapped potential %s stream: %s', scan_result.mime, uri)
return uri, scan_result
download_timeout = deadline - time.time()
if download_timeout < 0:
logger.info(
'Unwrapping stream from URI (%s) failed: timed out in %sms',
uri, timeout)
return None, None
content = http.download(
requests_session, uri, timeout=download_timeout / 1000)
if content is None:
logger.info(
'Unwrapping stream from URI (%s) failed: '
'error downloading URI %s', original_uri, uri)
return None, None
uris = playlists.parse(content)
if not uris:
logger.debug(
'Failed parsing URI (%s) as playlist; found potential stream.',
uri)
return uri, None
# TODO Test streams and return first that seems to be playable
logger.debug(
'Parsed playlist (%s) and found new URI: %s', uri, uris[0])
uri = urllib.parse.urljoin(uri, uris[0])
|
[{'piece_type': 'error message', 'piece_content': 'ERROR StreamBackend backend caused an exception.\\nTraceback (most recent call last):\\nFile "/usr/lib64/python2.7/site-packages/mopidy/core/library.py", line 19, in _backend_error_handling\\nyield\\nFile "/usr/lib64/python2.7/site-packages/mopidy/core/library.py", line 237, in lookup\\nresult = future.get()\\nFile "/usr/lib64/python2.7/site-packages/pykka/threading.py", line 52, in get\\ncompat.reraise(*self._data[\\'exc_info\\'])\\nFile "/usr/lib64/python2.7/site-packages/pykka/compat.py", line 12, in reraise\\nexec(\\'raise tp, value, tb\\')\\nFile "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop\\nresponse = self._handle_receive(message)\\nFile "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 295, in _handle_receive\\nreturn callee(*message[\\'args\\'], **message[\\'kwargs\\'])\\nFile "/usr/lib64/python2.7/site-packages/mopidy/stream/actor.py", line 65, in lookup\\nrequests_session=self.backend._session)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/stream/actor.py", line 131, in _unwrap_stream\\nnot scan_result.mime.startswith(\\'text/\\') and\\nAttributeError: \\'NoneType\\' object has no attribute \\'startswith\\''}]
|
ERROR StreamBackend backend caused an exception.
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/mopidy/core/library.py", line 19, in _backend_error_handling
yield
File "/usr/lib64/python2.7/site-packages/mopidy/core/library.py", line 237, in lookup
result = future.get()
File "/usr/lib64/python2.7/site-packages/pykka/threading.py", line 52, in get
compat.reraise(*self._data['exc_info'])
File "/usr/lib64/python2.7/site-packages/pykka/compat.py", line 12, in reraise
exec('raise tp, value, tb')
File "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 295, in _handle_receive
return callee(*message['args'], **message['kwargs'])
File "/usr/lib64/python2.7/site-packages/mopidy/stream/actor.py", line 65, in lookup
requests_session=self.backend._session)
File "/usr/lib64/python2.7/site-packages/mopidy/stream/actor.py", line 131, in _unwrap_stream
not scan_result.mime.startswith('text/') and
AttributeError: 'NoneType' object has no attribute 'startswith'
|
AttributeError
|
def listplaylist(context, name):
"""
*musicpd.org, stored playlists section:*
``listplaylist {NAME}``
Lists the files in the playlist ``NAME.m3u``.
Output format::
file: relative/path/to/file1.flac
file: relative/path/to/file2.ogg
file: relative/path/to/file3.mp3
"""
playlist = _get_playlist(context, name)
return [translator.uri_to_mpd_format(t.uri) for t in playlist.tracks]
|
def listplaylist(context, name):
"""
*musicpd.org, stored playlists section:*
``listplaylist {NAME}``
Lists the files in the playlist ``NAME.m3u``.
Output format::
file: relative/path/to/file1.flac
file: relative/path/to/file2.ogg
file: relative/path/to/file3.mp3
"""
playlist = _get_playlist(context, name)
return ['file: %s' % t.uri for t in playlist.tracks]
|
[{'piece_type': 'error message', 'piece_content': 'ERROR Unhandled exception in MpdSession (urn:uuid:76575e20-c10f-46e2-bc60-404ed1cffc27):\\nTraceback (most recent call last):\\nFile "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop\\nresponse = self._handle_receive(message)\\nFile "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 304, in _handle_receive\\nreturn self.on_receive(message)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/internal/network.py", line 423, in on_receive\\nself.on_line_received(line)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/session.py", line 34, in on_line_received\\nresponse = self.dispatcher.handle_request(line)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 47, in handle_request\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 77, in _catch_mpd_ack_errors_filter\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 87, in _authenticate_filter\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 106, in _command_list_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 135, in _idle_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 148, in _add_ok_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 160, in _call_handler_filter\\nresponse = self._format_response(self._call_handler(request))\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 184, in _format_response\\nformatted_response.extend(self._format_lines(element))\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 210, in _format_lines\\nreturn [\\'%s: %s\\' % (key, value)]\\nUnicodeDecodeError: \\'ascii\\' codec can\\'t decode byte 0xc3 in position 38: ordinal not in range(128)'}]
|
ERROR Unhandled exception in MpdSession (urn:uuid:76575e20-c10f-46e2-bc60-404ed1cffc27):
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 304, in _handle_receive
return self.on_receive(message)
File "/usr/lib64/python2.7/site-packages/mopidy/internal/network.py", line 423, in on_receive
self.on_line_received(line)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/session.py", line 34, in on_line_received
response = self.dispatcher.handle_request(line)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 47, in handle_request
return self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 77, in _catch_mpd_ack_errors_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 87, in _authenticate_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 106, in _command_list_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 135, in _idle_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 148, in _add_ok_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 160, in _call_handler_filter
response = self._format_response(self._call_handler(request))
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 184, in _format_response
formatted_response.extend(self._format_lines(element))
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 210, in _format_lines
return ['%s: %s' % (key, value)]
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 38: ordinal not in range(128)
|
UnicodeDecodeError
|
def track_to_mpd_format(track, position=None, stream_title=None):
"""
Format track for output to MPD client.
:param track: the track
:type track: :class:`mopidy.models.Track` or :class:`mopidy.models.TlTrack`
:param position: track's position in playlist
:type position: integer
:param stream_title: the current streams title
:type position: string
:rtype: list of two-tuples
"""
if isinstance(track, TlTrack):
(tlid, track) = track
else:
(tlid, track) = (None, track)
if not track.uri:
logger.warning('Ignoring track without uri')
return []
result = [
uri_to_mpd_format(track.uri),
('Time', track.length and (track.length // 1000) or 0),
('Artist', concat_multi_values(track.artists, 'name')),
('Album', track.album and track.album.name or ''),
]
if stream_title is not None:
result.append(('Title', stream_title))
if track.name:
result.append(('Name', track.name))
else:
result.append(('Title', track.name or ''))
if track.date:
result.append(('Date', track.date))
if track.album is not None and track.album.num_tracks is not None:
result.append(('Track', '%d/%d' % (
track.track_no or 0, track.album.num_tracks)))
else:
result.append(('Track', track.track_no or 0))
if position is not None and tlid is not None:
result.append(('Pos', position))
result.append(('Id', tlid))
if track.album is not None and track.album.musicbrainz_id is not None:
result.append(('MUSICBRAINZ_ALBUMID', track.album.musicbrainz_id))
if track.album is not None and track.album.artists:
result.append(
('AlbumArtist', concat_multi_values(track.album.artists, 'name')))
musicbrainz_ids = concat_multi_values(
track.album.artists, 'musicbrainz_id')
if musicbrainz_ids:
result.append(('MUSICBRAINZ_ALBUMARTISTID', musicbrainz_ids))
if track.artists:
musicbrainz_ids = concat_multi_values(track.artists, 'musicbrainz_id')
if musicbrainz_ids:
result.append(('MUSICBRAINZ_ARTISTID', musicbrainz_ids))
if track.composers:
result.append(
('Composer', concat_multi_values(track.composers, 'name')))
if track.performers:
result.append(
('Performer', concat_multi_values(track.performers, 'name')))
if track.genre:
result.append(('Genre', track.genre))
if track.disc_no:
result.append(('Disc', track.disc_no))
if track.last_modified:
datestring = datetime.datetime.utcfromtimestamp(
track.last_modified // 1000).isoformat()
result.append(('Last-Modified', datestring + 'Z'))
if track.musicbrainz_id is not None:
result.append(('MUSICBRAINZ_TRACKID', track.musicbrainz_id))
if track.album and track.album.uri:
result.append(('X-AlbumUri', track.album.uri))
if track.album and track.album.images:
images = ';'.join(i for i in track.album.images if i != '')
result.append(('X-AlbumImage', images))
result = [element for element in result if _has_value(*element)]
return result
|
def track_to_mpd_format(track, position=None, stream_title=None):
"""
Format track for output to MPD client.
:param track: the track
:type track: :class:`mopidy.models.Track` or :class:`mopidy.models.TlTrack`
:param position: track's position in playlist
:type position: integer
:param stream_title: the current streams title
:type position: string
:rtype: list of two-tuples
"""
if isinstance(track, TlTrack):
(tlid, track) = track
else:
(tlid, track) = (None, track)
if not track.uri:
logger.warning('Ignoring track without uri')
return []
result = [
('file', track.uri),
('Time', track.length and (track.length // 1000) or 0),
('Artist', concat_multi_values(track.artists, 'name')),
('Album', track.album and track.album.name or ''),
]
if stream_title is not None:
result.append(('Title', stream_title))
if track.name:
result.append(('Name', track.name))
else:
result.append(('Title', track.name or ''))
if track.date:
result.append(('Date', track.date))
if track.album is not None and track.album.num_tracks is not None:
result.append(('Track', '%d/%d' % (
track.track_no or 0, track.album.num_tracks)))
else:
result.append(('Track', track.track_no or 0))
if position is not None and tlid is not None:
result.append(('Pos', position))
result.append(('Id', tlid))
if track.album is not None and track.album.musicbrainz_id is not None:
result.append(('MUSICBRAINZ_ALBUMID', track.album.musicbrainz_id))
if track.album is not None and track.album.artists:
result.append(
('AlbumArtist', concat_multi_values(track.album.artists, 'name')))
musicbrainz_ids = concat_multi_values(
track.album.artists, 'musicbrainz_id')
if musicbrainz_ids:
result.append(('MUSICBRAINZ_ALBUMARTISTID', musicbrainz_ids))
if track.artists:
musicbrainz_ids = concat_multi_values(track.artists, 'musicbrainz_id')
if musicbrainz_ids:
result.append(('MUSICBRAINZ_ARTISTID', musicbrainz_ids))
if track.composers:
result.append(
('Composer', concat_multi_values(track.composers, 'name')))
if track.performers:
result.append(
('Performer', concat_multi_values(track.performers, 'name')))
if track.genre:
result.append(('Genre', track.genre))
if track.disc_no:
result.append(('Disc', track.disc_no))
if track.last_modified:
datestring = datetime.datetime.utcfromtimestamp(
track.last_modified // 1000).isoformat()
result.append(('Last-Modified', datestring + 'Z'))
if track.musicbrainz_id is not None:
result.append(('MUSICBRAINZ_TRACKID', track.musicbrainz_id))
if track.album and track.album.uri:
result.append(('X-AlbumUri', track.album.uri))
if track.album and track.album.images:
images = ';'.join(i for i in track.album.images if i != '')
result.append(('X-AlbumImage', images))
result = [element for element in result if _has_value(*element)]
return result
|
[{'piece_type': 'error message', 'piece_content': 'ERROR Unhandled exception in MpdSession (urn:uuid:76575e20-c10f-46e2-bc60-404ed1cffc27):\\nTraceback (most recent call last):\\nFile "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop\\nresponse = self._handle_receive(message)\\nFile "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 304, in _handle_receive\\nreturn self.on_receive(message)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/internal/network.py", line 423, in on_receive\\nself.on_line_received(line)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/session.py", line 34, in on_line_received\\nresponse = self.dispatcher.handle_request(line)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 47, in handle_request\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 77, in _catch_mpd_ack_errors_filter\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 87, in _authenticate_filter\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 106, in _command_list_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 135, in _idle_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 148, in _add_ok_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 160, in _call_handler_filter\\nresponse = self._format_response(self._call_handler(request))\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 184, in _format_response\\nformatted_response.extend(self._format_lines(element))\\nFile "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 210, in _format_lines\\nreturn [\\'%s: %s\\' % (key, value)]\\nUnicodeDecodeError: \\'ascii\\' codec can\\'t decode byte 0xc3 in position 38: ordinal not in range(128)'}]
|
ERROR Unhandled exception in MpdSession (urn:uuid:76575e20-c10f-46e2-bc60-404ed1cffc27):
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 304, in _handle_receive
return self.on_receive(message)
File "/usr/lib64/python2.7/site-packages/mopidy/internal/network.py", line 423, in on_receive
self.on_line_received(line)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/session.py", line 34, in on_line_received
response = self.dispatcher.handle_request(line)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 47, in handle_request
return self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 77, in _catch_mpd_ack_errors_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 87, in _authenticate_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 106, in _command_list_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 135, in _idle_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 148, in _add_ok_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 160, in _call_handler_filter
response = self._format_response(self._call_handler(request))
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 184, in _format_response
formatted_response.extend(self._format_lines(element))
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 210, in _format_lines
return ['%s: %s' % (key, value)]
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 38: ordinal not in range(128)
|
UnicodeDecodeError
|
def _get_user_dirs(xdg_config_dir):
"""Returns a dict of XDG dirs read from
``$XDG_CONFIG_HOME/user-dirs.dirs``.
This is used at import time for most users of :mod:`mopidy`. By rolling our
own implementation instead of using :meth:`glib.get_user_special_dir` we
make it possible for many extensions to run their test suites, which are
importing parts of :mod:`mopidy`, in a virtualenv with global site-packages
disabled, and thus no :mod:`glib` available.
"""
dirs_file = os.path.join(xdg_config_dir, b'user-dirs.dirs')
if not os.path.exists(dirs_file):
return {}
with open(dirs_file, 'rb') as fh:
data = fh.read()
data = b'[XDG_USER_DIRS]\\n' + data
data = data.replace(b'$HOME', os.path.expanduser(b'~'))
data = data.replace(b'"', b'')
config = configparser.RawConfigParser()
config.readfp(io.BytesIO(data))
return {
k.upper().decode('utf-8'): os.path.abspath(v)
for k, v in config.items('XDG_USER_DIRS') if v is not None
}
|
def _get_user_dirs(xdg_config_dir):
"""Returns a dict of XDG dirs read from
``$XDG_CONFIG_HOME/user-dirs.dirs``.
This is used at import time for most users of :mod:`mopidy`. By rolling our
own implementation instead of using :meth:`glib.get_user_special_dir` we
make it possible for many extensions to run their test suites, which are
importing parts of :mod:`mopidy`, in a virtualenv with global site-packages
disabled, and thus no :mod:`glib` available.
"""
dirs_file = os.path.join(xdg_config_dir, b'user-dirs.dirs')
if not os.path.exists(dirs_file):
return {}
with open(dirs_file, 'rb') as fh:
data = fh.read().decode('utf-8')
data = '[XDG_USER_DIRS]\\n' + data
data = data.replace('$HOME', os.path.expanduser('~'))
data = data.replace('"', '')
config = configparser.RawConfigParser()
config.readfp(io.StringIO(data))
return {
k.upper(): os.path.abspath(v)
for k, v in config.items('XDG_USER_DIRS') if v is not None}
|
[{'piece_type': 'error message', 'piece_content': 'ERROR FileBackend backend caused an exception.\\nTraceback (most recent call last):\\nFile "/usr/lib/python2.7/dist-packages/mopidy/core/library.py", line 19, in _backend_error_handling\\nyield\\nFile "/usr/lib/python2.7/dist-packages/mopidy/core/library.py", line 112, in _browse\\nresult = backend.library.browse(uri).get()\\nFile "/usr/lib/python2.7/dist-packages/pykka/threading.py", line 52, in get\\ncompat.reraise(*self._data[\\'exc_info\\'])\\nFile "/usr/lib/python2.7/dist-packages/pykka/compat.py", line 12, in reraise\\nexec(\\'raise tp, value, tb\\')\\nFile "/usr/lib/python2.7/dist-packages/pykka/actor.py", line 201, in _actor_loop\\nresponse = self._handle_receive(message)\\nFile "/usr/lib/python2.7/dist-packages/pykka/actor.py", line 295, in _handle_receive\\nreturn callee(*message[\\'args\\'], **message[\\'kwargs\\'])\\nFile "/usr/lib/python2.7/dist-packages/mopidy/file/library.py", line 53, in browse\\nif not self._is_in_basedir(os.path.realpath(local_path)):\\nFile "/usr/lib/python2.7/dist-packages/mopidy/file/library.py", line 146, in _is_in_basedir\\nfor media_dir in self._media_dirs)\\nFile "/usr/lib/python2.7/dist-packages/mopidy/file/library.py", line 146, in <genexpr>\\nfor media_dir in self._media_dirs)\\nFile "/usr/lib/python2.7/dist-packages/mopidy/internal/path.py", line\\n210, in is_path_inside_base_dir\\nraise ValueError(\\'base_path is not a bytestring\\')\\nValueError: base_path is not a bytestring'}]
|
ERROR FileBackend backend caused an exception.
Traceback (most recent call last):
File "/usr/lib/python2.7/dist-packages/mopidy/core/library.py", line 19, in _backend_error_handling
yield
File "/usr/lib/python2.7/dist-packages/mopidy/core/library.py", line 112, in _browse
result = backend.library.browse(uri).get()
File "/usr/lib/python2.7/dist-packages/pykka/threading.py", line 52, in get
compat.reraise(*self._data['exc_info'])
File "/usr/lib/python2.7/dist-packages/pykka/compat.py", line 12, in reraise
exec('raise tp, value, tb')
File "/usr/lib/python2.7/dist-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/usr/lib/python2.7/dist-packages/pykka/actor.py", line 295, in _handle_receive
return callee(*message['args'], **message['kwargs'])
File "/usr/lib/python2.7/dist-packages/mopidy/file/library.py", line 53, in browse
if not self._is_in_basedir(os.path.realpath(local_path)):
File "/usr/lib/python2.7/dist-packages/mopidy/file/library.py", line 146, in _is_in_basedir
for media_dir in self._media_dirs)
File "/usr/lib/python2.7/dist-packages/mopidy/file/library.py", line 146, in <genexpr>
for media_dir in self._media_dirs)
File "/usr/lib/python2.7/dist-packages/mopidy/internal/path.py", line
210, in is_path_inside_base_dir
raise ValueError('base_path is not a bytestring')
ValueError: base_path is not a bytestring
|
ValueError
|
def validate(self, value):
value = super(Identifier, self).validate(value)
if isinstance(value, compat.text_type):
value = value.encode('utf-8')
return compat.intern(value)
|
def validate(self, value):
return compat.intern(str(super(Identifier, self).validate(value)))
|
[{'piece_type': 'error message', 'piece_content': 'INFO Scanned 3500 of 5494 files in 25s, ~14s left.\\nERROR \\'ascii\\' codec can\\'t encode character u\\'\\\\ufeff\\' in position 0: ordinal not in range(128)\\nTraceback (most recent call last):\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/__main__.py", line 134, in main\\nreturn args.command.run(args, proxied_config)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/local/commands.py", line 150, in run\\ntrack = tags.convert_tags_to_track(result.tags).replace(\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/audio/tags.py", line 81, in convert_tags_to_track\\n\\'musicbrainz-sortname\\')\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/audio/tags.py", line 137, in _artists\\nreturn [Artist(**attrs)]\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 158, in __call__\\n*args, **kwargs)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 34, in __init__\\nself._set_field(key, value)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 186, in _set_field\\nobject.__setattr__(self, name, value)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/models/fields.py", line 50, in __set__\\nvalue = self.validate(value)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/models/fields.py", line 98, in validate\\nreturn compat.intern(str(super(Identifier, self).validate(value)))\\nUnicodeEncodeError: \\'ascii\\' codec can\\'t encode character u\\'\\\\ufeff\\' in position 0: ordinal not in range(128)\\nTraceback (most recent call last):\\nFile "/usr/local/bin/mopidy", line 9, in <module>\\nload_entry_point(\\'Mopidy==2.0.0\\', \\'console_scripts\\', \\'mopidy\\')()\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/__main__.py", line 134, in main\\nreturn args.command.run(args, proxied_config)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/local/commands.py", line 150, in run\\ntrack = tags.convert_tags_to_track(result.tags).replace(\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/audio/tags.py", line 81, in convert_tags_to_track\\n\\'musicbrainz-sortname\\')\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/audio/tags.py", line 137, in _artists\\nreturn [Artist(**attrs)]\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 158, in __call__\\n*args, **kwargs)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 34, in __init__\\nself._set_field(key, value)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 186, in _set_field\\nobject.__setattr__(self, name, value)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/models/fields.py", line 50, in __set__\\nvalue = self.validate(value)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/models/fields.py", line 98, in validate\\nreturn compat.intern(str(super(Identifier, self).validate(value)))\\nUnicodeEncodeError: \\'ascii\\' codec can\\'t encode character u\\'\\\\ufeff\\' in position 0: ordinal not in range(128)'}]
|
INFO Scanned 3500 of 5494 files in 25s, ~14s left.
ERROR 'ascii' codec can't encode character u'\\ufeff' in position 0: ordinal not in range(128)
Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/mopidy/__main__.py", line 134, in main
return args.command.run(args, proxied_config)
File "/usr/local/lib/python2.7/site-packages/mopidy/local/commands.py", line 150, in run
track = tags.convert_tags_to_track(result.tags).replace(
File "/usr/local/lib/python2.7/site-packages/mopidy/audio/tags.py", line 81, in convert_tags_to_track
'musicbrainz-sortname')
File "/usr/local/lib/python2.7/site-packages/mopidy/audio/tags.py", line 137, in _artists
return [Artist(**attrs)]
File "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 158, in __call__
*args, **kwargs)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 34, in __init__
self._set_field(key, value)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 186, in _set_field
object.__setattr__(self, name, value)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/fields.py", line 50, in __set__
value = self.validate(value)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/fields.py", line 98, in validate
return compat.intern(str(super(Identifier, self).validate(value)))
UnicodeEncodeError: 'ascii' codec can't encode character u'\\ufeff' in position 0: ordinal not in range(128)
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 9, in <module>
load_entry_point('Mopidy==2.0.0', 'console_scripts', 'mopidy')()
File "/usr/local/lib/python2.7/site-packages/mopidy/__main__.py", line 134, in main
return args.command.run(args, proxied_config)
File "/usr/local/lib/python2.7/site-packages/mopidy/local/commands.py", line 150, in run
track = tags.convert_tags_to_track(result.tags).replace(
File "/usr/local/lib/python2.7/site-packages/mopidy/audio/tags.py", line 81, in convert_tags_to_track
'musicbrainz-sortname')
File "/usr/local/lib/python2.7/site-packages/mopidy/audio/tags.py", line 137, in _artists
return [Artist(**attrs)]
File "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 158, in __call__
*args, **kwargs)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 34, in __init__
self._set_field(key, value)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 186, in _set_field
object.__setattr__(self, name, value)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/fields.py", line 50, in __set__
value = self.validate(value)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/fields.py", line 98, in validate
return compat.intern(str(super(Identifier, self).validate(value)))
UnicodeEncodeError: 'ascii' codec can't encode character u'\\ufeff' in position 0: ordinal not in range(128)
|
UnicodeEncodeError
|
def on_stream_start(self):
gst_logger.debug('Got STREAM_START bus message')
uri = self._audio._pending_uri
logger.debug('Audio event: stream_changed(uri=%r)', uri)
AudioListener.send('stream_changed', uri=uri)
# Emit any postponed tags that we got after about-to-finish.
tags, self._audio._pending_tags = self._audio._pending_tags, None
self._audio._tags = tags or {}
if tags:
logger.debug('Audio event: tags_changed(tags=%r)', tags.keys())
AudioListener.send('tags_changed', tags=tags.keys())
|
def on_stream_start(self):
gst_logger.debug('Got STREAM_START bus message')
uri = self._audio._pending_uri
logger.debug('Audio event: stream_changed(uri=%r)', uri)
AudioListener.send('stream_changed', uri=uri)
# Emit any postponed tags that we got after about-to-finish.
tags, self._audio._pending_tags = self._audio._pending_tags, None
self._audio._tags = tags
if tags:
logger.debug('Audio event: tags_changed(tags=%r)', tags.keys())
AudioListener.send('tags_changed', tags=tags.keys())
|
[{'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 225, in on_message\\nself.on_tag(taglist)\\nFile "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 340, in on_tag\\nif self._audio._tags.get(key, unique) != value:\\nAttributeError: \\'NoneType\\' object has no attribute \\'get\\'\\nTraceback (most recent call last):\\nFile "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 225, in on_message\\nself.on_tag(taglist)\\nFile "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 340, in on_tag\\nif self._audio._tags.get(key, unique) != value:\\nAttributeError: \\'NoneType\\' object has no attribute \\'get\\''}]
|
Traceback (most recent call last):
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 225, in on_message
self.on_tag(taglist)
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 340, in on_tag
if self._audio._tags.get(key, unique) != value:
AttributeError: 'NoneType' object has no attribute 'get'
Traceback (most recent call last):
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 225, in on_message
self.on_tag(taglist)
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 340, in on_tag
if self._audio._tags.get(key, unique) != value:
AttributeError: 'NoneType' object has no attribute 'get'
|
AttributeError
|
def on_playbin_state_changed(self, old_state, new_state, pending_state):
gst_logger.debug(
'Got STATE_CHANGED bus message: old=%s new=%s pending=%s',
old_state.value_name, new_state.value_name,
pending_state.value_name)
if new_state == Gst.State.READY and pending_state == Gst.State.NULL:
# XXX: We're not called on the last state change when going down to
# NULL, so we rewrite the second to last call to get the expected
# behavior.
new_state = Gst.State.NULL
pending_state = Gst.State.VOID_PENDING
if pending_state != Gst.State.VOID_PENDING:
return # Ignore intermediate state changes
if new_state == Gst.State.READY:
return # Ignore READY state as it's GStreamer specific
new_state = _GST_STATE_MAPPING[new_state]
old_state, self._audio.state = self._audio.state, new_state
target_state = _GST_STATE_MAPPING.get(self._audio._target_state)
if target_state is None:
# XXX: Workaround for #1430, to be fixed properly by #1222.
logger.debug('Race condition happened. See #1222 and #1430.')
return
if target_state == new_state:
target_state = None
logger.debug('Audio event: state_changed(old_state=%s, new_state=%s, '
'target_state=%s)', old_state, new_state, target_state)
AudioListener.send('state_changed', old_state=old_state,
new_state=new_state, target_state=target_state)
if new_state == PlaybackState.STOPPED:
logger.debug('Audio event: stream_changed(uri=None)')
AudioListener.send('stream_changed', uri=None)
if 'GST_DEBUG_DUMP_DOT_DIR' in os.environ:
Gst.debug_bin_to_dot_file(
self._audio._playbin, Gst.DebugGraphDetails.ALL, 'mopidy')
|
def on_playbin_state_changed(self, old_state, new_state, pending_state):
gst_logger.debug(
'Got STATE_CHANGED bus message: old=%s new=%s pending=%s',
old_state.value_name, new_state.value_name,
pending_state.value_name)
if new_state == Gst.State.READY and pending_state == Gst.State.NULL:
# XXX: We're not called on the last state change when going down to
# NULL, so we rewrite the second to last call to get the expected
# behavior.
new_state = Gst.State.NULL
pending_state = Gst.State.VOID_PENDING
if pending_state != Gst.State.VOID_PENDING:
return # Ignore intermediate state changes
if new_state == Gst.State.READY:
return # Ignore READY state as it's GStreamer specific
new_state = _GST_STATE_MAPPING[new_state]
old_state, self._audio.state = self._audio.state, new_state
target_state = _GST_STATE_MAPPING[self._audio._target_state]
if target_state == new_state:
target_state = None
logger.debug('Audio event: state_changed(old_state=%s, new_state=%s, '
'target_state=%s)', old_state, new_state, target_state)
AudioListener.send('state_changed', old_state=old_state,
new_state=new_state, target_state=target_state)
if new_state == PlaybackState.STOPPED:
logger.debug('Audio event: stream_changed(uri=None)')
AudioListener.send('stream_changed', uri=None)
if 'GST_DEBUG_DUMP_DOT_DIR' in os.environ:
Gst.debug_bin_to_dot_file(
self._audio._playbin, Gst.DebugGraphDetails.ALL, 'mopidy')
|
[{'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "/home/trygve/dev/mopidy/mopidy/mopidy/audio/actor.py", line 210, in on_message\\nself.on_playbin_state_changed(old_state, new_state, pending_state)\\nFile "/home/trygve/dev/mopidy/mopidy/mopidy/audio/actor.py", line 260, in on_playbin_state_changed\\ntarget_state = _GST_STATE_MAPPING[self._audio._target_state]\\nKeyError: <enum GST_STATE_READY of type GstState>'}]
|
Traceback (most recent call last):
File "/home/trygve/dev/mopidy/mopidy/mopidy/audio/actor.py", line 210, in on_message
self.on_playbin_state_changed(old_state, new_state, pending_state)
File "/home/trygve/dev/mopidy/mopidy/mopidy/audio/actor.py", line 260, in on_playbin_state_changed
target_state = _GST_STATE_MAPPING[self._audio._target_state]
KeyError: <enum GST_STATE_READY of type GstState>
|
KeyError
|
def playlist_uri_from_name(self, name):
"""
Helper function to retrieve a playlist URI from its unique MPD name.
"""
if name not in self._uri_from_name:
self.refresh_playlists_mapping()
return self._uri_from_name.get(name)
|
def playlist_uri_from_name(self, name):
"""
Helper function to retrieve a playlist URI from its unique MPD name.
"""
if not self._uri_from_name:
self.refresh_playlists_mapping()
return self._uri_from_name.get(name)
|
[{'piece_type': 'error message', 'piece_content': '2015-12-04 23:41:33,959 ERROR [MpdSession-13] /home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py:269\\npykka Unhandled exception in MpdSession (urn:uuid:093fbff0-33df-4e39-ba0b-c7259431372c):\\nTraceback (most recent call last):\\nFile "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop\\nresponse = self._handle_receive(message)\\nFile "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py", line 304, in _handle_receive\\nreturn self.on_receive(message)\\nFile "/home/adamcik/dev/mopidy/mopidy/internal/network.py", line 370, in on_receive\\nself.on_line_received(line)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/session.py", line 34, in on_line_received\\nresponse = self.dispatcher.handle_request(line)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 47, in handle_request\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 76, in _catch_mpd_ack_errors_filter\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 89, in _authenticate_filter\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 105, in _command_list_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 134, in _idle_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 147, in _add_ok_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 159, in _call_handler_filter\\nresponse = self._format_response(self._call_handler(request))\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 174, in _call_handler\\nreturn protocol.commands.call(tokens, context=self.context)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/protocol/__init__.py", line 180, in call\\nreturn self.handlers[tokens[0]](context, *tokens[1:])\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/protocol/__init__.py", line 158, in validate\\nreturn func(**callargs)\\nFile "/home/adamcik/dev/mopidy/mopidy/mpd/protocol/stored_playlists.py", line 331, in rm\\ncontext.core.playlists.delete(uri).get()\\nFile "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/threading.py", line 52, in get\\ncompat.reraise(*self._data[\\'exc_info\\'])\\nFile "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/compat.py", line 12, in reraise\\nexec(\\'raise tp, value, tb\\')\\nFile "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop\\nresponse = self._handle_receive(message)\\nFile "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py", line 295, in _handle_receive\\nreturn callee(*message[\\'args\\'], **message[\\'kwargs\\'])\\nFile "/home/adamcik/dev/mopidy/mopidy/core/playlists.py", line 176, in delete\\nvalidation.check_uri(uri)\\nFile "/home/adamcik/dev/mopidy/mopidy/internal/validation.py", line 98, in check_uri\\nraise exceptions.ValidationError(msg.format(arg=arg))\\nValidationError: Expected a valid URI, not None'}]
|
2015-12-04 23:41:33,959 ERROR [MpdSession-13] /home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py:269
pykka Unhandled exception in MpdSession (urn:uuid:093fbff0-33df-4e39-ba0b-c7259431372c):
Traceback (most recent call last):
File "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py", line 304, in _handle_receive
return self.on_receive(message)
File "/home/adamcik/dev/mopidy/mopidy/internal/network.py", line 370, in on_receive
self.on_line_received(line)
File "/home/adamcik/dev/mopidy/mopidy/mpd/session.py", line 34, in on_line_received
response = self.dispatcher.handle_request(line)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 47, in handle_request
return self._call_next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 76, in _catch_mpd_ack_errors_filter
return self._call_next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 89, in _authenticate_filter
return self._call_next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 105, in _command_list_filter
response = self._call_next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 134, in _idle_filter
response = self._call_next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 147, in _add_ok_filter
response = self._call_next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 159, in _call_handler_filter
response = self._format_response(self._call_handler(request))
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 174, in _call_handler
return protocol.commands.call(tokens, context=self.context)
File "/home/adamcik/dev/mopidy/mopidy/mpd/protocol/__init__.py", line 180, in call
return self.handlers[tokens[0]](context, *tokens[1:])
File "/home/adamcik/dev/mopidy/mopidy/mpd/protocol/__init__.py", line 158, in validate
return func(**callargs)
File "/home/adamcik/dev/mopidy/mopidy/mpd/protocol/stored_playlists.py", line 331, in rm
context.core.playlists.delete(uri).get()
File "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/threading.py", line 52, in get
compat.reraise(*self._data['exc_info'])
File "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/compat.py", line 12, in reraise
exec('raise tp, value, tb')
File "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py", line 295, in _handle_receive
return callee(*message['args'], **message['kwargs'])
File "/home/adamcik/dev/mopidy/mopidy/core/playlists.py", line 176, in delete
validation.check_uri(uri)
File "/home/adamcik/dev/mopidy/mopidy/internal/validation.py", line 98, in check_uri
raise exceptions.ValidationError(msg.format(arg=arg))
ValidationError: Expected a valid URI, not None
|
ValidationError
|
def _get_library(args, config):
libraries = dict((l.name, l) for l in args.registry['local:library'])
library_name = config['local']['library']
if library_name not in libraries:
logger.error('Local library %s not found', library_name)
return None
logger.debug('Using %s as the local library', library_name)
return libraries[library_name](config)
|
def _get_library(args, config):
libraries = dict((l.name, l) for l in args.registry['local:library'])
library_name = config['local']['library']
if library_name not in libraries:
logger.warning('Local library %s not found', library_name)
return 1
logger.debug('Using %s as the local library', library_name)
return libraries[library_name](config)
|
[{'piece_type': 'error message', 'piece_content': 'INFO Starting Mopidy 1.1.1\\nINFO Loading config from builtin defaults\\nINFO Loading config from /etc/mopidy/mopidy.conf\\nINFO Loading config from command line options\\nINFO Enabled extensions: mpd, http, stream, podcast-gpodder, m3u, podcast-itunes, softwaremixer, file, musicbox_webclient, podcast, local, tunein, soundcloud\\nINFO Disabled extensions: none\\nWARNING Local library images not found\\nINFO Found 8597 files in media_dir.\\nERROR \\'int\\' object has no attribute \\'load\\'\\nTraceback (most recent call last):\\nFile "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main\\nreturn args.command.run(args, proxied_config)\\nFile "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run\\nnum_tracks = library.load()\\nAttributeError: \\'int\\' object has no attribute \\'load\\'\\nTraceback (most recent call last):\\nFile "/usr/bin/mopidy", line 9, in <module>\\nload_entry_point(\\'Mopidy==1.1.1\\', \\'console_scripts\\', \\'mopidy\\')()\\nFile "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main\\nreturn args.command.run(args, proxied_config)\\nFile "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run\\nnum_tracks = library.load()\\nAttributeError: \\'int\\' object has no attribute \\'load\\''}]
|
INFO Starting Mopidy 1.1.1
INFO Loading config from builtin defaults
INFO Loading config from /etc/mopidy/mopidy.conf
INFO Loading config from command line options
INFO Enabled extensions: mpd, http, stream, podcast-gpodder, m3u, podcast-itunes, softwaremixer, file, musicbox_webclient, podcast, local, tunein, soundcloud
INFO Disabled extensions: none
WARNING Local library images not found
INFO Found 8597 files in media_dir.
ERROR 'int' object has no attribute 'load'
Traceback (most recent call last):
File "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main
return args.command.run(args, proxied_config)
File "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run
num_tracks = library.load()
AttributeError: 'int' object has no attribute 'load'
Traceback (most recent call last):
File "/usr/bin/mopidy", line 9, in <module>
load_entry_point('Mopidy==1.1.1', 'console_scripts', 'mopidy')()
File "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main
return args.command.run(args, proxied_config)
File "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run
num_tracks = library.load()
AttributeError: 'int' object has no attribute 'load'
|
AttributeError
|
def run(self, args, config):
library = _get_library(args, config)
if library is None:
return 1
prompt = '\\nAre you sure you want to clear the library? [y/N] '
if compat.input(prompt).lower() != 'y':
print('Clearing library aborted.')
return 0
if library.clear():
print('Library successfully cleared.')
return 0
print('Unable to clear library.')
return 1
|
def run(self, args, config):
library = _get_library(args, config)
prompt = '\\nAre you sure you want to clear the library? [y/N] '
if compat.input(prompt).lower() != 'y':
print('Clearing library aborted.')
return 0
if library.clear():
print('Library successfully cleared.')
return 0
print('Unable to clear library.')
return 1
|
[{'piece_type': 'error message', 'piece_content': 'INFO Starting Mopidy 1.1.1\\nINFO Loading config from builtin defaults\\nINFO Loading config from /etc/mopidy/mopidy.conf\\nINFO Loading config from command line options\\nINFO Enabled extensions: mpd, http, stream, podcast-gpodder, m3u, podcast-itunes, softwaremixer, file, musicbox_webclient, podcast, local, tunein, soundcloud\\nINFO Disabled extensions: none\\nWARNING Local library images not found\\nINFO Found 8597 files in media_dir.\\nERROR \\'int\\' object has no attribute \\'load\\'\\nTraceback (most recent call last):\\nFile "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main\\nreturn args.command.run(args, proxied_config)\\nFile "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run\\nnum_tracks = library.load()\\nAttributeError: \\'int\\' object has no attribute \\'load\\'\\nTraceback (most recent call last):\\nFile "/usr/bin/mopidy", line 9, in <module>\\nload_entry_point(\\'Mopidy==1.1.1\\', \\'console_scripts\\', \\'mopidy\\')()\\nFile "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main\\nreturn args.command.run(args, proxied_config)\\nFile "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run\\nnum_tracks = library.load()\\nAttributeError: \\'int\\' object has no attribute \\'load\\''}]
|
INFO Starting Mopidy 1.1.1
INFO Loading config from builtin defaults
INFO Loading config from /etc/mopidy/mopidy.conf
INFO Loading config from command line options
INFO Enabled extensions: mpd, http, stream, podcast-gpodder, m3u, podcast-itunes, softwaremixer, file, musicbox_webclient, podcast, local, tunein, soundcloud
INFO Disabled extensions: none
WARNING Local library images not found
INFO Found 8597 files in media_dir.
ERROR 'int' object has no attribute 'load'
Traceback (most recent call last):
File "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main
return args.command.run(args, proxied_config)
File "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run
num_tracks = library.load()
AttributeError: 'int' object has no attribute 'load'
Traceback (most recent call last):
File "/usr/bin/mopidy", line 9, in <module>
load_entry_point('Mopidy==1.1.1', 'console_scripts', 'mopidy')()
File "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main
return args.command.run(args, proxied_config)
File "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run
num_tracks = library.load()
AttributeError: 'int' object has no attribute 'load'
|
AttributeError
|
def run(self, args, config):
media_dir = config['local']['media_dir']
scan_timeout = config['local']['scan_timeout']
flush_threshold = config['local']['scan_flush_threshold']
excluded_file_extensions = config['local']['excluded_file_extensions']
excluded_file_extensions = tuple(
bytes(file_ext.lower()) for file_ext in excluded_file_extensions)
library = _get_library(args, config)
if library is None:
return 1
file_mtimes, file_errors = path.find_mtimes(
media_dir, follow=config['local']['scan_follow_symlinks'])
logger.info('Found %d files in media_dir.', len(file_mtimes))
if file_errors:
logger.warning('Encountered %d errors while scanning media_dir.',
len(file_errors))
for name in file_errors:
logger.debug('Scan error %r for %r', file_errors[name], name)
num_tracks = library.load()
logger.info('Checking %d tracks from library.', num_tracks)
uris_to_update = set()
uris_to_remove = set()
uris_in_library = set()
for track in library.begin():
abspath = translator.local_track_uri_to_path(track.uri, media_dir)
mtime = file_mtimes.get(abspath)
if mtime is None:
logger.debug('Missing file %s', track.uri)
uris_to_remove.add(track.uri)
elif mtime > track.last_modified or args.force:
uris_to_update.add(track.uri)
uris_in_library.add(track.uri)
logger.info('Removing %d missing tracks.', len(uris_to_remove))
for uri in uris_to_remove:
library.remove(uri)
for abspath in file_mtimes:
relpath = os.path.relpath(abspath, media_dir)
uri = translator.path_to_local_track_uri(relpath)
if b'/.' in relpath:
logger.debug('Skipped %s: Hidden directory/file.', uri)
elif relpath.lower().endswith(excluded_file_extensions):
logger.debug('Skipped %s: File extension excluded.', uri)
elif uri not in uris_in_library:
uris_to_update.add(uri)
logger.info(
'Found %d tracks which need to be updated.', len(uris_to_update))
logger.info('Scanning...')
uris_to_update = sorted(uris_to_update, key=lambda v: v.lower())
uris_to_update = uris_to_update[:args.limit]
scanner = scan.Scanner(scan_timeout)
progress = _Progress(flush_threshold, len(uris_to_update))
for uri in uris_to_update:
try:
relpath = translator.local_track_uri_to_path(uri, media_dir)
file_uri = path.path_to_uri(os.path.join(media_dir, relpath))
result = scanner.scan(file_uri)
tags, duration = result.tags, result.duration
if not result.playable:
logger.warning('Failed %s: No audio found in file.', uri)
elif duration < MIN_DURATION_MS:
logger.warning('Failed %s: Track shorter than %dms',
uri, MIN_DURATION_MS)
else:
mtime = file_mtimes.get(os.path.join(media_dir, relpath))
track = utils.convert_tags_to_track(tags).replace(
uri=uri, length=duration, last_modified=mtime)
if library.add_supports_tags_and_duration:
library.add(track, tags=tags, duration=duration)
else:
library.add(track)
logger.debug('Added %s', track.uri)
except exceptions.ScannerError as error:
logger.warning('Failed %s: %s', uri, error)
if progress.increment():
progress.log()
if library.flush():
logger.debug('Progress flushed.')
progress.log()
library.close()
logger.info('Done scanning.')
return 0
|
def run(self, args, config):
media_dir = config['local']['media_dir']
scan_timeout = config['local']['scan_timeout']
flush_threshold = config['local']['scan_flush_threshold']
excluded_file_extensions = config['local']['excluded_file_extensions']
excluded_file_extensions = tuple(
bytes(file_ext.lower()) for file_ext in excluded_file_extensions)
library = _get_library(args, config)
file_mtimes, file_errors = path.find_mtimes(
media_dir, follow=config['local']['scan_follow_symlinks'])
logger.info('Found %d files in media_dir.', len(file_mtimes))
if file_errors:
logger.warning('Encountered %d errors while scanning media_dir.',
len(file_errors))
for name in file_errors:
logger.debug('Scan error %r for %r', file_errors[name], name)
num_tracks = library.load()
logger.info('Checking %d tracks from library.', num_tracks)
uris_to_update = set()
uris_to_remove = set()
uris_in_library = set()
for track in library.begin():
abspath = translator.local_track_uri_to_path(track.uri, media_dir)
mtime = file_mtimes.get(abspath)
if mtime is None:
logger.debug('Missing file %s', track.uri)
uris_to_remove.add(track.uri)
elif mtime > track.last_modified or args.force:
uris_to_update.add(track.uri)
uris_in_library.add(track.uri)
logger.info('Removing %d missing tracks.', len(uris_to_remove))
for uri in uris_to_remove:
library.remove(uri)
for abspath in file_mtimes:
relpath = os.path.relpath(abspath, media_dir)
uri = translator.path_to_local_track_uri(relpath)
if b'/.' in relpath:
logger.debug('Skipped %s: Hidden directory/file.', uri)
elif relpath.lower().endswith(excluded_file_extensions):
logger.debug('Skipped %s: File extension excluded.', uri)
elif uri not in uris_in_library:
uris_to_update.add(uri)
logger.info(
'Found %d tracks which need to be updated.', len(uris_to_update))
logger.info('Scanning...')
uris_to_update = sorted(uris_to_update, key=lambda v: v.lower())
uris_to_update = uris_to_update[:args.limit]
scanner = scan.Scanner(scan_timeout)
progress = _Progress(flush_threshold, len(uris_to_update))
for uri in uris_to_update:
try:
relpath = translator.local_track_uri_to_path(uri, media_dir)
file_uri = path.path_to_uri(os.path.join(media_dir, relpath))
result = scanner.scan(file_uri)
tags, duration = result.tags, result.duration
if not result.playable:
logger.warning('Failed %s: No audio found in file.', uri)
elif duration < MIN_DURATION_MS:
logger.warning('Failed %s: Track shorter than %dms',
uri, MIN_DURATION_MS)
else:
mtime = file_mtimes.get(os.path.join(media_dir, relpath))
track = utils.convert_tags_to_track(tags).replace(
uri=uri, length=duration, last_modified=mtime)
if library.add_supports_tags_and_duration:
library.add(track, tags=tags, duration=duration)
else:
library.add(track)
logger.debug('Added %s', track.uri)
except exceptions.ScannerError as error:
logger.warning('Failed %s: %s', uri, error)
if progress.increment():
progress.log()
if library.flush():
logger.debug('Progress flushed.')
progress.log()
library.close()
logger.info('Done scanning.')
return 0
|
[{'piece_type': 'error message', 'piece_content': 'INFO Starting Mopidy 1.1.1\\nINFO Loading config from builtin defaults\\nINFO Loading config from /etc/mopidy/mopidy.conf\\nINFO Loading config from command line options\\nINFO Enabled extensions: mpd, http, stream, podcast-gpodder, m3u, podcast-itunes, softwaremixer, file, musicbox_webclient, podcast, local, tunein, soundcloud\\nINFO Disabled extensions: none\\nWARNING Local library images not found\\nINFO Found 8597 files in media_dir.\\nERROR \\'int\\' object has no attribute \\'load\\'\\nTraceback (most recent call last):\\nFile "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main\\nreturn args.command.run(args, proxied_config)\\nFile "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run\\nnum_tracks = library.load()\\nAttributeError: \\'int\\' object has no attribute \\'load\\'\\nTraceback (most recent call last):\\nFile "/usr/bin/mopidy", line 9, in <module>\\nload_entry_point(\\'Mopidy==1.1.1\\', \\'console_scripts\\', \\'mopidy\\')()\\nFile "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main\\nreturn args.command.run(args, proxied_config)\\nFile "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run\\nnum_tracks = library.load()\\nAttributeError: \\'int\\' object has no attribute \\'load\\''}]
|
INFO Starting Mopidy 1.1.1
INFO Loading config from builtin defaults
INFO Loading config from /etc/mopidy/mopidy.conf
INFO Loading config from command line options
INFO Enabled extensions: mpd, http, stream, podcast-gpodder, m3u, podcast-itunes, softwaremixer, file, musicbox_webclient, podcast, local, tunein, soundcloud
INFO Disabled extensions: none
WARNING Local library images not found
INFO Found 8597 files in media_dir.
ERROR 'int' object has no attribute 'load'
Traceback (most recent call last):
File "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main
return args.command.run(args, proxied_config)
File "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run
num_tracks = library.load()
AttributeError: 'int' object has no attribute 'load'
Traceback (most recent call last):
File "/usr/bin/mopidy", line 9, in <module>
load_entry_point('Mopidy==1.1.1', 'console_scripts', 'mopidy')()
File "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main
return args.command.run(args, proxied_config)
File "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run
num_tracks = library.load()
AttributeError: 'int' object has no attribute 'load'
|
AttributeError
|
def parse_urilist(data):
result = []
for line in data.splitlines():
if not line.strip() or line.startswith(b'#'):
continue
try:
validation.check_uri(line)
except ValueError:
return []
result.append(line)
return result
|
def parse_urilist(data):
result = []
for line in data.splitlines():
if not line.strip() or line.startswith('#'):
continue
try:
validation.check_uri(line)
except ValueError:
return []
result.append(line)
return result
|
[{'piece_type': 'error message', 'piece_content': 'INFO 2015-08-22 22:19:26,991 [855:MpdSession-31] mopidy.mpd.session\\nNew MPD connection from [::ffff:127.0.0.1]:50701\\nDEBUG 2015-08-22 22:19:26,993 [855:MpdSession-31] mopidy.mpd.session\\nRequest from [::ffff:127.0.0.1]:50701: command_list_begin\\nDEBUG 2015-08-22 22:19:26,993 [855:MpdSession-31] mopidy.mpd.session\\nRequest from [::ffff:127.0.0.1]:50701: add "http://feedproxy.google.com/~r/WelcomeToNightVale/~5/tXeJa4IGs-8/23-EternalScouts.mp3"\\nDEBUG 2015-08-22 22:19:26,994 [855:MpdSession-31] mopidy.mpd.session\\nRequest from [::ffff:127.0.0.1]:50701: play "0"\\nDEBUG 2015-08-22 22:19:26,994 [855:MpdSession-31] mopidy.mpd.session\\nRequest from [::ffff:127.0.0.1]:50701: command_list_end\\nDEBUG 2015-08-22 22:19:28,176 [855:Core-27] mopidy.core.tracklist\\nTriggering event: tracklist_changed()\\nDEBUG 2015-08-22 22:19:28,177 [855:MainThread] mopidy.listener\\nSending tracklist_changed to CoreListener: {}\\nDEBUG 2015-08-22 22:19:28,177 [855:Core-27] mopidy.core.playback\\nChanging state: stopped -> playing\\nDEBUG 2015-08-22 22:19:28,177 [855:Core-27] mopidy.core.playback\\nTriggering playback state change event\\nDEBUG 2015-08-22 22:19:28,179 [855:MainThread] mopidy.listener\\nSending playback_state_changed to CoreListener: {\\'old_state\\': u\\'stopped\\', \\'new_state\\': u\\'playing\\'}\\nDEBUG 2015-08-22 22:19:28,179 [855:Audio-2] mopidy.audio.gst\\nState change to GST_STATE_READY: result=GST_STATE_CHANGE_SUCCESS\\nDEBUG 2015-08-22 22:19:28,179 [855:MainThread] mopidy.audio.gst\\nGot state-changed message: old=GST_STATE_NULL new=GST_STATE_READY pending=GST_STATE_VOID_PENDING\\nINFO 2015-08-22 22:19:34,545 [855:MpdSession-32] mopidy.mpd.session\\nNew MPD connection from [::ffff:127.0.0.1]:50713\\nDEBUG 2015-08-22 22:19:34,547 [855:MpdSession-32] mopidy.mpd.session\\nRequest from [::ffff:127.0.0.1]:50713: status\\nERROR 2015-08-22 22:19:38,324 [855:MpdSession-31] pykka\\nUnhandled exception in MpdSession (urn:uuid:8a894042-6120-4236-a944-cd336bd7c8b3):\\nTraceback (most recent call last):\\nFile "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop\\nresponse = self._handle_receive(message)\\nFile "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 304, in _handle_receive\\nreturn self.on_receive(message)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/internal/network.py", line 370, in on_receive\\nself.on_line_received(line)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/session.py", line 34, in on_line_received\\nresponse = self.dispatcher.handle_request(line)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 47, in handle_request\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 76, in _catch_mpd_ack_errors_filter\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 86, in _authenticate_filter\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 105, in _command_list_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 134, in _idle_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 147, in _add_ok_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 159, in _call_handler_filter\\nresponse = self._format_response(self._call_handler(request))\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 174, in _call_handler\\nreturn protocol.commands.call(tokens, context=self.context)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/__init__.py", line 180, in call\\nreturn self.handlers[tokens[0]](context, *tokens[1:])\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/__init__.py", line 158, in validate\\nreturn func(**callargs)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/command_list.py", line 42, in command_list_end\\ncommand, current_command_list_index=index)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 47, in handle_request\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 76, in _catch_mpd_ack_errors_filter\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 86, in _authenticate_filter\\nreturn self._call_next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 105, in _command_list_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 134, in _idle_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 147, in _add_ok_filter\\nresponse = self._call_next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter\\nreturn next_filter(request, response, filter_chain)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 159, in _call_handler_filter\\nresponse = self._format_response(self._call_handler(request))\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 174, in _call_handler\\nreturn protocol.commands.call(tokens, context=self.context)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/__init__.py", line 180, in call\\nreturn self.handlers[tokens[0]](context, *tokens[1:])\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/__init__.py", line 158, in validate\\nreturn func(**callargs)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/playback.py", line 181, in play\\nreturn context.core.playback.play(tl_track).get()\\nFile "/usr/local/lib/python2.7/site-packages/pykka/threading.py", line 52, in get\\ncompat.reraise(*self._data[\\'exc_info\\'])\\nFile "/usr/local/lib/python2.7/site-packages/pykka/compat.py", line 12, in reraise\\nexec(\\'raise tp, value, tb\\')\\nFile "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop\\nresponse = self._handle_receive(message)\\nFile "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 295, in _handle_receive\\nreturn callee(*message[\\'args\\'], **message[\\'kwargs\\'])\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/core/playback.py", line 305, in play\\nself._play(tl_track=tl_track, tlid=tlid, on_error_step=1)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/core/playback.py", line 348, in _play\\nbackend.playback.change_track(tl_track.track).get() and\\nFile "/usr/local/lib/python2.7/site-packages/pykka/threading.py", line 52, in get\\ncompat.reraise(*self._data[\\'exc_info\\'])\\nFile "/usr/local/lib/python2.7/site-packages/pykka/compat.py", line 12, in reraise\\nexec(\\'raise tp, value, tb\\')\\nFile "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop\\nresponse = self._handle_receive(message)\\nFile "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 295, in _handle_receive\\nreturn callee(*message[\\'args\\'], **message[\\'kwargs\\'])\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/backend.py", line 245, in change_track\\nuri = self.translate_uri(track.uri)\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/stream/actor.py", line 90, in translate_uri\\ntracks = list(playlists.parse(content))\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/internal/playlists.py", line 28, in parse\\nreturn parse_urilist(data) # Fallback\\nFile "/usr/local/lib/python2.7/site-packages/mopidy/internal/playlists.py", line 125, in parse_urilist\\nif not line.strip() or line.startswith(\\'#\\'):\\nUnicodeDecodeError: \\'ascii\\' codec can\\'t decode byte 0xdf in position 154: ordinal not in range(128)\\nDEBUG 2015-08-22 22:19:38,326 [855:Audio-2] mopidy.audio.actor\\nPosition query failed'}]
|
INFO 2015-08-22 22:19:26,991 [855:MpdSession-31] mopidy.mpd.session
New MPD connection from [::ffff:127.0.0.1]:50701
DEBUG 2015-08-22 22:19:26,993 [855:MpdSession-31] mopidy.mpd.session
Request from [::ffff:127.0.0.1]:50701: command_list_begin
DEBUG 2015-08-22 22:19:26,993 [855:MpdSession-31] mopidy.mpd.session
Request from [::ffff:127.0.0.1]:50701: add "http://feedproxy.google.com/~r/WelcomeToNightVale/~5/tXeJa4IGs-8/23-EternalScouts.mp3"
DEBUG 2015-08-22 22:19:26,994 [855:MpdSession-31] mopidy.mpd.session
Request from [::ffff:127.0.0.1]:50701: play "0"
DEBUG 2015-08-22 22:19:26,994 [855:MpdSession-31] mopidy.mpd.session
Request from [::ffff:127.0.0.1]:50701: command_list_end
DEBUG 2015-08-22 22:19:28,176 [855:Core-27] mopidy.core.tracklist
Triggering event: tracklist_changed()
DEBUG 2015-08-22 22:19:28,177 [855:MainThread] mopidy.listener
Sending tracklist_changed to CoreListener: {}
DEBUG 2015-08-22 22:19:28,177 [855:Core-27] mopidy.core.playback
Changing state: stopped -> playing
DEBUG 2015-08-22 22:19:28,177 [855:Core-27] mopidy.core.playback
Triggering playback state change event
DEBUG 2015-08-22 22:19:28,179 [855:MainThread] mopidy.listener
Sending playback_state_changed to CoreListener: {'old_state': u'stopped', 'new_state': u'playing'}
DEBUG 2015-08-22 22:19:28,179 [855:Audio-2] mopidy.audio.gst
State change to GST_STATE_READY: result=GST_STATE_CHANGE_SUCCESS
DEBUG 2015-08-22 22:19:28,179 [855:MainThread] mopidy.audio.gst
Got state-changed message: old=GST_STATE_NULL new=GST_STATE_READY pending=GST_STATE_VOID_PENDING
INFO 2015-08-22 22:19:34,545 [855:MpdSession-32] mopidy.mpd.session
New MPD connection from [::ffff:127.0.0.1]:50713
DEBUG 2015-08-22 22:19:34,547 [855:MpdSession-32] mopidy.mpd.session
Request from [::ffff:127.0.0.1]:50713: status
ERROR 2015-08-22 22:19:38,324 [855:MpdSession-31] pykka
Unhandled exception in MpdSession (urn:uuid:8a894042-6120-4236-a944-cd336bd7c8b3):
Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 304, in _handle_receive
return self.on_receive(message)
File "/usr/local/lib/python2.7/site-packages/mopidy/internal/network.py", line 370, in on_receive
self.on_line_received(line)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/session.py", line 34, in on_line_received
response = self.dispatcher.handle_request(line)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 47, in handle_request
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 76, in _catch_mpd_ack_errors_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 86, in _authenticate_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 105, in _command_list_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 134, in _idle_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 147, in _add_ok_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 159, in _call_handler_filter
response = self._format_response(self._call_handler(request))
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 174, in _call_handler
return protocol.commands.call(tokens, context=self.context)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/__init__.py", line 180, in call
return self.handlers[tokens[0]](context, *tokens[1:])
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/__init__.py", line 158, in validate
return func(**callargs)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/command_list.py", line 42, in command_list_end
command, current_command_list_index=index)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 47, in handle_request
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 76, in _catch_mpd_ack_errors_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 86, in _authenticate_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 105, in _command_list_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 134, in _idle_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 147, in _add_ok_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 159, in _call_handler_filter
response = self._format_response(self._call_handler(request))
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 174, in _call_handler
return protocol.commands.call(tokens, context=self.context)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/__init__.py", line 180, in call
return self.handlers[tokens[0]](context, *tokens[1:])
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/__init__.py", line 158, in validate
return func(**callargs)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/playback.py", line 181, in play
return context.core.playback.play(tl_track).get()
File "/usr/local/lib/python2.7/site-packages/pykka/threading.py", line 52, in get
compat.reraise(*self._data['exc_info'])
File "/usr/local/lib/python2.7/site-packages/pykka/compat.py", line 12, in reraise
exec('raise tp, value, tb')
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 295, in _handle_receive
return callee(*message['args'], **message['kwargs'])
File "/usr/local/lib/python2.7/site-packages/mopidy/core/playback.py", line 305, in play
self._play(tl_track=tl_track, tlid=tlid, on_error_step=1)
File "/usr/local/lib/python2.7/site-packages/mopidy/core/playback.py", line 348, in _play
backend.playback.change_track(tl_track.track).get() and
File "/usr/local/lib/python2.7/site-packages/pykka/threading.py", line 52, in get
compat.reraise(*self._data['exc_info'])
File "/usr/local/lib/python2.7/site-packages/pykka/compat.py", line 12, in reraise
exec('raise tp, value, tb')
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 295, in _handle_receive
return callee(*message['args'], **message['kwargs'])
File "/usr/local/lib/python2.7/site-packages/mopidy/backend.py", line 245, in change_track
uri = self.translate_uri(track.uri)
File "/usr/local/lib/python2.7/site-packages/mopidy/stream/actor.py", line 90, in translate_uri
tracks = list(playlists.parse(content))
File "/usr/local/lib/python2.7/site-packages/mopidy/internal/playlists.py", line 28, in parse
return parse_urilist(data) # Fallback
File "/usr/local/lib/python2.7/site-packages/mopidy/internal/playlists.py", line 125, in parse_urilist
if not line.strip() or line.startswith('#'):
UnicodeDecodeError: 'ascii' codec can't decode byte 0xdf in position 154: ordinal not in range(128)
DEBUG 2015-08-22 22:19:38,326 [855:Audio-2] mopidy.audio.actor
Position query failed
|
UnicodeDecodeError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.