after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def process_bookmark(hide="n", start_page=1, end_page=0):
try:
total_list = list()
print(f"My Member Id = {__br__._myId}")
if hide != "o":
print("Importing Bookmarks...")
total_list.extend(get_bookmarks(False, start_page, end_page, __br__._myId))
if hide != "n":
print("Importing Private Bookmarks...")
total_list.extend(get_bookmarks(True, start_page, end_page, __br__._myId))
print(f"Result: {str(len(total_list))} items.")
i = 0
current_member = 1
for item in total_list:
print(
"%d/%d\t%f %%"
% (i, len(total_list), 100.0 * i / float(len(total_list)))
)
i += 1
prefix = "[{0} of {1}]".format(current_member, len(total_list))
process_member(item.memberId, item.path, title_prefix=prefix)
current_member = current_member + 1
if len(total_list) > 0:
print(
"%d/%d\t%f %%"
% (i, len(total_list), 100.0 * i / float(len(total_list)))
)
else:
print("Cannot find any followed member.")
except KeyboardInterrupt:
raise
except BaseException:
PixivHelper.print_and_log(
"error", "Error at process_bookmark(): {0}".format(sys.exc_info())
)
raise
|
def process_bookmark(hide="n", start_page=1, end_page=0):
try:
total_list = list()
if hide != "o":
print("Importing Bookmarks...")
total_list.extend(get_bookmarks(False, start_page, end_page))
if hide != "n":
print("Importing Private Bookmarks...")
total_list.extend(get_bookmarks(True, start_page, end_page))
print("Result: ", str(len(total_list)), "items.")
i = 0
current_member = 1
for item in total_list:
print(
"%d/%d\t%f %%"
% (i, len(total_list), 100.0 * i / float(len(total_list)))
)
i += 1
prefix = "[{0} of {1}]".format(current_member, len(total_list))
process_member(item.memberId, item.path, title_prefix=prefix)
current_member = current_member + 1
print("%d/%d\t%f %%" % (i, len(total_list), 100.0 * i / float(len(total_list))))
except KeyboardInterrupt:
raise
except BaseException:
PixivHelper.print_and_log(
"error", "Error at process_bookmark(): {0}".format(sys.exc_info())
)
raise
|
https://github.com/Nandaka/PixivUtil2/issues/622
|
Input: 5
Include Private bookmarks [y/n/o]:
Start Page (default=1):
End Page (default=0, 0 for no limit):
Importing Bookmarks...
Exporting page 1
Source URL: https://www.pixiv.net/bookmark.php?type=user&p=1
Using default DB Path: /mnt/c/PixivUtil2/db.sqlite
No more data
Result: 0 items.
Error at process_bookmark(): (<class 'ZeroDivisionError'>, ZeroDivisionError('float division by zero'), <traceback object at 0x7fa7885a55a0>)
Traceback (most recent call last):
File "PixivUtil2.py", line 2407, in main
np_is_valid, op_is_valid, selection = main_loop(ewd, op_is_valid, selection, np_is_valid, args)
File "PixivUtil2.py", line 2127, in main_loop
menu_download_from_online_user_bookmark(op_is_valid, args)
File "PixivUtil2.py", line 1722, in menu_download_from_online_user_bookmark
process_bookmark(hide, start_page, end_page)
File "PixivUtil2.py", line 1235, in process_bookmark
print("%d/%d\t%f %%" % (i, len(total_list), 100.0 * i / float(len(total_list))))
ZeroDivisionError: float division by zero
press enter to exit.
|
ZeroDivisionError
|
def writeConfig(self, error=False, path=None):
"""Backup old config if exist and write updated config.ini"""
print("Writing config file...", end=" ")
config = configparser.RawConfigParser()
config.add_section("Network")
config.set("Network", "useProxy", self.useProxy)
config.set("Network", "proxyAddress", self.proxyAddress)
config.set("Network", "useragent", self.useragent)
config.set("Network", "useRobots", self.useRobots)
config.set("Network", "timeout", self.timeout)
config.set("Network", "retry", self.retry)
config.set("Network", "retrywait", self.retryWait)
config.set("Network", "downloadDelay", self.downloadDelay)
config.set("Network", "checkNewVersion", self.checkNewVersion)
config.set("Network", "enableSSLVerification", self.enableSSLVerification)
config.add_section("Debug")
config.set("Debug", "logLevel", self.logLevel)
config.set("Debug", "enableDump", self.enableDump)
config.set("Debug", "skipDumpFilter", self.skipDumpFilter)
config.set("Debug", "dumpMediumPage", self.dumpMediumPage)
config.set("Debug", "dumpTagSearchPage", self.dumpTagSearchPage)
config.set("Debug", "debugHttp", self.debugHttp)
config.add_section("IrfanView")
config.set("IrfanView", "IrfanViewPath", self.IrfanViewPath)
config.set("IrfanView", "startIrfanView", self.startIrfanView)
config.set("IrfanView", "startIrfanSlide", self.startIrfanSlide)
config.set("IrfanView", "createDownloadLists", self.createDownloadLists)
config.add_section("Settings")
config.set("Settings", "downloadListDirectory", self.downloadListDirectory)
config.set("Settings", "useList", self.useList)
config.set("Settings", "processFromDb", self.processFromDb)
config.set("Settings", "overwrite", self.overwrite)
config.set("Settings", "daylastupdated", self.dayLastUpdated)
config.set("Settings", "rootdirectory", self.rootDirectory)
config.set("Settings", "alwaysCheckFileSize", self.alwaysCheckFileSize)
config.set("Settings", "checkUpdatedLimit", self.checkUpdatedLimit)
config.set("Settings", "downloadAvatar", self.downloadAvatar)
config.set("Settings", "useBlacklistTags", self.useBlacklistTags)
config.set("Settings", "useSuppressTags", self.useSuppressTags)
config.set("Settings", "tagsLimit", self.tagsLimit)
config.set("Settings", "writeImageInfo", self.writeImageInfo)
config.set("Settings", "writeImageJSON", self.writeImageJSON)
config.set("Settings", "dateDiff", self.dateDiff)
config.set("Settings", "backupOldFile", self.backupOldFile)
config.set("Settings", "enableInfiniteLoop", self.enableInfiniteLoop)
config.set("Settings", "verifyImage", self.verifyImage)
config.set("Settings", "writeUrlInDescription", self.writeUrlInDescription)
config.set("Settings", "urlBlacklistRegex", self.urlBlacklistRegex)
config.set("Settings", "dbPath", self.dbPath)
config.set("Settings", "useBlacklistMembers", self.useBlacklistMembers)
config.set("Settings", "setLastModified", self.setLastModified)
config.set("Settings", "useLocalTimezone", self.useLocalTimezone)
config.add_section("Filename")
config.set("Filename", "filenameFormat", self.filenameFormat)
config.set("Filename", "filenameMangaFormat", self.filenameMangaFormat)
config.set("Filename", "filenameInfoFormat", self.filenameInfoFormat)
config.set("Filename", "avatarNameFormat", self.avatarNameFormat)
config.set("Filename", "tagsSeparator", self.tagsSeparator)
config.set("Filename", "createMangaDir", self.createMangaDir)
config.set("Filename", "useTagsAsDir", self.useTagsAsDir)
config.set("Filename", "urlDumpFilename", self.urlDumpFilename)
config.add_section("Authentication")
config.set("Authentication", "username", self.username)
config.set("Authentication", "password", self.password)
config.set("Authentication", "cookie", self.cookie)
config.set("Authentication", "refresh_token", self.refresh_token)
config.add_section("Pixiv")
config.set("Pixiv", "numberOfPage", self.numberOfPage)
config.set("Pixiv", "R18Mode", self.r18mode)
config.set("Pixiv", "DateFormat", self.dateFormat)
config.add_section("FFmpeg")
config.set("FFmpeg", "ffmpeg", self.ffmpeg)
config.set("FFmpeg", "ffmpegCodec", self.ffmpegCodec)
config.set("FFmpeg", "ffmpegParam", self.ffmpegParam)
config.set("FFmpeg", "webpCodec", self.webpCodec)
config.set("FFmpeg", "webpParam", self.webpParam)
config.add_section("Ugoira")
config.set("Ugoira", "writeUgoiraInfo", self.writeUgoiraInfo)
config.set("Ugoira", "createUgoira", self.createUgoira)
config.set("Ugoira", "deleteZipFile", self.deleteZipFile)
config.set("Ugoira", "createGif", self.createGif)
config.set("Ugoira", "createApng", self.createApng)
config.set("Ugoira", "deleteUgoira", self.deleteUgoira)
config.set("Ugoira", "createWebm", self.createWebm)
config.set("Ugoira", "createWebp", self.createWebp)
if path is not None:
configlocation = path
else:
configlocation = "config.ini"
try:
# with codecs.open('config.ini.bak', encoding = 'utf-8', mode = 'wb') as configfile:
with open(configlocation + ".tmp", "w", encoding="utf8") as configfile:
config.write(configfile)
if os.path.exists(configlocation):
if error:
backupName = configlocation + ".error-" + str(int(time.time()))
print("Backing up old config (error exist!) to " + backupName)
shutil.move(configlocation, backupName)
else:
print("Backing up old config to config.ini.bak")
shutil.move(configlocation, configlocation + ".bak")
os.rename(configlocation + ".tmp", configlocation)
except BaseException:
self.__logger.exception("Error at writeConfig()")
raise
print("done.")
|
def writeConfig(self, error=False, path=None):
"""Backup old config if exist and write updated config.ini"""
print("Writing config file...", end=" ")
config = configparser.RawConfigParser()
config.add_section("Network")
config.set("Network", "useProxy", self.useProxy)
config.set("Network", "proxyAddress", self.proxyAddress)
config.set("Network", "useragent", self.useragent)
config.set("Network", "useRobots", self.useRobots)
config.set("Network", "timeout", self.timeout)
config.set("Network", "retry", self.retry)
config.set("Network", "retrywait", self.retryWait)
config.set("Network", "downloadDelay", self.downloadDelay)
config.set("Network", "checkNewVersion", self.checkNewVersion)
config.set("Network", "enableSSLVerification", self.enableSSLVerification)
config.add_section("Debug")
config.set("Debug", "logLevel", self.logLevel)
config.set("Debug", "enableDump", self.enableDump)
config.set("Debug", "skipDumpFilter", self.skipDumpFilter)
config.set("Debug", "dumpMediumPage", self.dumpMediumPage)
config.set("Debug", "dumpTagSearchPage", self.dumpTagSearchPage)
config.set("Debug", "debugHttp", self.debugHttp)
config.add_section("IrfanView")
config.set("IrfanView", "IrfanViewPath", self.IrfanViewPath)
config.set("IrfanView", "startIrfanView", self.startIrfanView)
config.set("IrfanView", "startIrfanSlide", self.startIrfanSlide)
config.set("IrfanView", "createDownloadLists", self.createDownloadLists)
config.add_section("Settings")
config.set("Settings", "downloadListDirectory", self.downloadListDirectory)
config.set("Settings", "useList", self.useList)
config.set("Settings", "processFromDb", self.processFromDb)
config.set("Settings", "overwrite", self.overwrite)
config.set("Settings", "daylastupdated", self.dayLastUpdated)
config.set("Settings", "rootdirectory", self.rootDirectory)
config.set("Settings", "alwaysCheckFileSize", self.alwaysCheckFileSize)
config.set("Settings", "checkUpdatedLimit", self.checkUpdatedLimit)
config.set("Settings", "downloadAvatar", self.downloadAvatar)
config.set("Settings", "useBlacklistTags", self.useBlacklistTags)
config.set("Settings", "useSuppressTags", self.useSuppressTags)
config.set("Settings", "tagsLimit", self.tagsLimit)
config.set("Settings", "writeImageInfo", self.writeImageInfo)
config.set("Settings", "writeImageJSON", self.writeImageJSON)
config.set("Settings", "dateDiff", self.dateDiff)
config.set("Settings", "backupOldFile", self.backupOldFile)
config.set("Settings", "enableInfiniteLoop", self.enableInfiniteLoop)
config.set("Settings", "verifyImage", self.verifyImage)
config.set("Settings", "writeUrlInDescription", self.writeUrlInDescription)
config.set("Settings", "urlBlacklistRegex", self.urlBlacklistRegex)
config.set("Settings", "dbPath", self.dbPath)
config.set("Settings", "useBlacklistMembers", self.useBlacklistMembers)
config.set("Settings", "setLastModified", self.setLastModified)
config.set("Settings", "useLocalTimezone", self.useLocalTimezone)
config.add_section("Filename")
config.set("Filename", "filenameFormat", self.filenameFormat)
config.set("Filename", "filenameMangaFormat", self.filenameMangaFormat)
config.set("Filename", "filenameInfoFormat", self.filenameInfoFormat)
config.set("Filename", "avatarNameFormat", self.avatarNameFormat)
config.set("Filename", "tagsSeparator", self.tagsSeparator)
config.set("Filename", "createMangaDir", self.createMangaDir)
config.set("Filename", "useTagsAsDir", self.useTagsAsDir)
config.set("Filename", "urlDumpFilename", self.urlDumpFilename)
config.add_section("Authentication")
config.set("Authentication", "username", self.username)
config.set("Authentication", "password", self.password)
config.set("Authentication", "cookie", self.cookie)
config.set("Authentication", "refresh_token", self.refresh_token)
config.add_section("Pixiv")
config.set("Pixiv", "numberOfPage", self.numberOfPage)
config.set("Pixiv", "R18Mode", self.r18mode)
config.set("Pixiv", "DateFormat", self.dateFormat)
config.add_section("FFmpeg")
config.set("FFmpeg", "ffmpeg", self.ffmpeg)
config.set("FFmpeg", "ffmpegCodec", self.ffmpegCodec)
config.set("FFmpeg", "ffmpegParam", self.ffmpegParam)
config.set("FFmpeg", "webpCodec", self.webpCodec)
config.set("FFmpeg", "webpParam", self.webpParam)
config.add_section("Ugoira")
config.set("Ugoira", "writeUgoiraInfo", self.writeUgoiraInfo)
config.set("Ugoira", "createUgoira", self.createUgoira)
config.set("Ugoira", "deleteZipFile", self.deleteZipFile)
config.set("Ugoira", "createGif", self.createGif)
config.set("Ugoira", "createApng", self.createApng)
config.set("Ugoira", "deleteUgoira", self.deleteUgoira)
config.set("Ugoira", "createWebm", self.createWebm)
config.set("Ugoira", "createWebp", self.createWebp)
if path is not None:
configlocation = path
else:
configlocation = "config.ini"
try:
# with codecs.open('config.ini.bak', encoding = 'utf-8', mode = 'wb') as configfile:
with open(configlocation + ".tmp", "w") as configfile:
config.write(configfile)
if os.path.exists(configlocation):
if error:
backupName = configlocation + ".error-" + str(int(time.time()))
print("Backing up old config (error exist!) to " + backupName)
shutil.move(configlocation, backupName)
else:
print("Backing up old config to config.ini.bak")
shutil.move(configlocation, configlocation + ".bak")
os.rename(configlocation + ".tmp", configlocation)
except BaseException:
self.__logger.exception("Error at writeConfig()")
raise
print("done.")
|
https://github.com/Nandaka/PixivUtil2/issues/606
|
2019-12-27 21:16:44,158 - PixivUtil20191221 - ERROR - Error at loadConfig()
Traceback (most recent call last):
File "PixivConfig.pyc", line 125, in loadConfig
File "configparser.pyc", line 718, in read_file
File "configparser.pyc", line 1017, in _read
File "codecs.pyc", line 714, in __next__
File "codecs.pyc", line 645, in __next__
File "codecs.pyc", line 558, in readline
File "codecs.pyc", line 504, in read
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xa5 in position 16: invalid start byte
|
UnicodeDecodeError
|
def dump_html(filename, html_text):
isDumpEnabled = True
filename = sanitize_filename(filename)
if _config is not None:
isDumpEnabled = _config.enableDump
if _config.enableDump:
if len(_config.skipDumpFilter) > 0:
matchResult = re.findall(_config.skipDumpFilter, filename)
if matchResult is not None and len(matchResult) > 0:
isDumpEnabled = False
if html_text is not None and len(html_text) == 0:
print_and_log("info", "Empty Html.")
return ""
if isDumpEnabled:
try:
dump = open(filename, "wb")
dump.write(str(html_text))
dump.close()
return filename
except IOError as ex:
print_and_log("error", str(ex))
print_and_log("info", "Dump File created: {0}".format(filename))
else:
print_and_log("info", "Dump not enabled.")
return ""
|
def dump_html(filename, html_text):
isDumpEnabled = True
filename = sanitize_filename(filename)
if _config is not None:
isDumpEnabled = _config.enableDump
if _config.enableDump:
if len(_config.skipDumpFilter) > 0:
matchResult = re.findall(_config.skipDumpFilter, filename)
if matchResult is not None and len(matchResult) > 0:
isDumpEnabled = False
if html_text is not None and len(html_text) == 0:
print_and_log("info", "Empty Html.")
return ""
if isDumpEnabled:
try:
dump = open(filename, "wb", encoding="utf-8")
dump.write(str(html_text))
dump.close()
return filename
except IOError as ex:
print_and_log("error", str(ex))
print_and_log("info", "Dump File created: {0}".format(filename))
else:
print_and_log("info", "Dump not enabled.")
return ""
|
https://github.com/Nandaka/PixivUtil2/issues/580
|
Input: 1
Member ids: 668238
Start Page (default=1):
End Page (default=0, 0 for no limit):
Member IDs: [668238]
Processing Member Id: 668238
Reading /mnt/c/PixivUtil2/config.ini ...
done.
Page 1
Member Url: https://www.pixiv.net/ajax/user/668238/profile/all
Member ID (668238): 1002 This user account has been suspended., hasDumpPage=Y
Traceback (most recent call last):
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 480, in getMemberInfoWhitecube
info_ajax_str = self.open_with_retry(url_ajax).read()
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 147, in open_with_retry
return self.open(url, data, timeout)
File "/home/whinette/.local/lib/python3.7/site-packages/mechanize/_mechanize.py", line 253, in open
return self._mech_open(url_or_request, data, timeout=timeout)
File "/home/whinette/.local/lib/python3.7/site-packages/mechanize/_mechanize.py", line 309, in _mech_open
raise response
mechanize._response.get_seek_wrapper_class.<locals>.httperror_seek_wrapper: HTTP Error 403: Forbidden
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "PixivUtil2.py", line 443, in process_member
(artist, list_page) = PixivBrowserFactory.getBrowser().getMemberPage(member_id, page, bookmark, tags)
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 557, in getMemberPage
self.getMemberInfoWhitecube(member_id, artist, bookmark)
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 506, in getMemberInfoWhitecube
msg, errorCode=PixivException.USER_ID_SUSPENDED, htmlPage=errorMessage)
PixivException.PixivException: 1002 This user account has been suspended., hasDumpPage=Y
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "PixivUtil2.py", line 454, in process_member
PixivHelper.dump_html("Dump for " + str(member_id) + " Error Code " + str(ex.errorCode) + ".html", list_page)
File "/mnt/c/PixivUtil2/PixivHelper.py", line 422, in dump_html
dump = open(filename, 'wb', encoding="utf-8")
ValueError: binary mode doesn't take an encoding argument
Error at process_member(): (<class 'ValueError'>, ValueError("binary mode doesn't take an encoding argument"), <traceback object at 0x7f162a5ad7d0>)
Cannot dump page for member_id: 668238
Traceback (most recent call last):
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 480, in getMemberInfoWhitecube
info_ajax_str = self.open_with_retry(url_ajax).read()
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 147, in open_with_retry
return self.open(url, data, timeout)
File "/home/whinette/.local/lib/python3.7/site-packages/mechanize/_mechanize.py", line 253, in open
return self._mech_open(url_or_request, data, timeout=timeout)
File "/home/whinette/.local/lib/python3.7/site-packages/mechanize/_mechanize.py", line 309, in _mech_open
raise response
mechanize._response.get_seek_wrapper_class.<locals>.httperror_seek_wrapper: HTTP Error 403: Forbidden
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "PixivUtil2.py", line 443, in process_member
(artist, list_page) = PixivBrowserFactory.getBrowser().getMemberPage(member_id, page, bookmark, tags)
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 557, in getMemberPage
self.getMemberInfoWhitecube(member_id, artist, bookmark)
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 506, in getMemberInfoWhitecube
msg, errorCode=PixivException.USER_ID_SUSPENDED, htmlPage=errorMessage)
PixivException.PixivException: 1002 This user account has been suspended., hasDumpPage=Y
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "PixivUtil2.py", line 2404, in main
np_is_valid, op_is_valid, selection = main_loop(ewd, op_is_valid, selection, np_is_valid, args)
File "PixivUtil2.py", line 2114, in main_loop
menu_download_by_member_id(op_is_valid, args)
File "PixivUtil2.py", line 1528, in menu_download_by_member_id
process_member(member_id, page=page, end_page=end_page, title_prefix=prefix)
File "PixivUtil2.py", line 454, in process_member
PixivHelper.dump_html("Dump for " + str(member_id) + " Error Code " + str(ex.errorCode) + ".html", list_page)
File "/mnt/c/PixivUtil2/PixivHelper.py", line 422, in dump_html
dump = open(filename, 'wb', encoding="utf-8")
ValueError: binary mode doesn't take an encoding argument
press enter to exit.
|
ValueError
|
def dump_html(filename, html_text):
isDumpEnabled = True
filename = sanitize_filename(filename)
if _config is not None:
isDumpEnabled = _config.enableDump
if _config.enableDump:
if len(_config.skipDumpFilter) > 0:
matchResult = re.findall(_config.skipDumpFilter, filename)
if matchResult is not None and len(matchResult) > 0:
isDumpEnabled = False
if html_text is not None and len(html_text) == 0:
print_and_log("info", "Empty Html.")
return ""
if isDumpEnabled:
try:
dump = open(filename, "wb")
dump.write(html_text)
dump.close()
return filename
except IOError as ex:
print_and_log("error", str(ex))
print_and_log("info", "Dump File created: {0}".format(filename))
else:
print_and_log("info", "Dump not enabled.")
return ""
|
def dump_html(filename, html_text):
isDumpEnabled = True
filename = sanitize_filename(filename)
if _config is not None:
isDumpEnabled = _config.enableDump
if _config.enableDump:
if len(_config.skipDumpFilter) > 0:
matchResult = re.findall(_config.skipDumpFilter, filename)
if matchResult is not None and len(matchResult) > 0:
isDumpEnabled = False
if html_text is not None and len(html_text) == 0:
print_and_log("info", "Empty Html.")
return ""
if isDumpEnabled:
try:
dump = open(filename, "wb")
dump.write(str(html_text))
dump.close()
return filename
except IOError as ex:
print_and_log("error", str(ex))
print_and_log("info", "Dump File created: {0}".format(filename))
else:
print_and_log("info", "Dump not enabled.")
return ""
|
https://github.com/Nandaka/PixivUtil2/issues/580
|
Input: 1
Member ids: 668238
Start Page (default=1):
End Page (default=0, 0 for no limit):
Member IDs: [668238]
Processing Member Id: 668238
Reading /mnt/c/PixivUtil2/config.ini ...
done.
Page 1
Member Url: https://www.pixiv.net/ajax/user/668238/profile/all
Member ID (668238): 1002 This user account has been suspended., hasDumpPage=Y
Traceback (most recent call last):
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 480, in getMemberInfoWhitecube
info_ajax_str = self.open_with_retry(url_ajax).read()
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 147, in open_with_retry
return self.open(url, data, timeout)
File "/home/whinette/.local/lib/python3.7/site-packages/mechanize/_mechanize.py", line 253, in open
return self._mech_open(url_or_request, data, timeout=timeout)
File "/home/whinette/.local/lib/python3.7/site-packages/mechanize/_mechanize.py", line 309, in _mech_open
raise response
mechanize._response.get_seek_wrapper_class.<locals>.httperror_seek_wrapper: HTTP Error 403: Forbidden
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "PixivUtil2.py", line 443, in process_member
(artist, list_page) = PixivBrowserFactory.getBrowser().getMemberPage(member_id, page, bookmark, tags)
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 557, in getMemberPage
self.getMemberInfoWhitecube(member_id, artist, bookmark)
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 506, in getMemberInfoWhitecube
msg, errorCode=PixivException.USER_ID_SUSPENDED, htmlPage=errorMessage)
PixivException.PixivException: 1002 This user account has been suspended., hasDumpPage=Y
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "PixivUtil2.py", line 454, in process_member
PixivHelper.dump_html("Dump for " + str(member_id) + " Error Code " + str(ex.errorCode) + ".html", list_page)
File "/mnt/c/PixivUtil2/PixivHelper.py", line 422, in dump_html
dump = open(filename, 'wb', encoding="utf-8")
ValueError: binary mode doesn't take an encoding argument
Error at process_member(): (<class 'ValueError'>, ValueError("binary mode doesn't take an encoding argument"), <traceback object at 0x7f162a5ad7d0>)
Cannot dump page for member_id: 668238
Traceback (most recent call last):
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 480, in getMemberInfoWhitecube
info_ajax_str = self.open_with_retry(url_ajax).read()
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 147, in open_with_retry
return self.open(url, data, timeout)
File "/home/whinette/.local/lib/python3.7/site-packages/mechanize/_mechanize.py", line 253, in open
return self._mech_open(url_or_request, data, timeout=timeout)
File "/home/whinette/.local/lib/python3.7/site-packages/mechanize/_mechanize.py", line 309, in _mech_open
raise response
mechanize._response.get_seek_wrapper_class.<locals>.httperror_seek_wrapper: HTTP Error 403: Forbidden
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "PixivUtil2.py", line 443, in process_member
(artist, list_page) = PixivBrowserFactory.getBrowser().getMemberPage(member_id, page, bookmark, tags)
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 557, in getMemberPage
self.getMemberInfoWhitecube(member_id, artist, bookmark)
File "/mnt/c/PixivUtil2/PixivBrowserFactory.py", line 506, in getMemberInfoWhitecube
msg, errorCode=PixivException.USER_ID_SUSPENDED, htmlPage=errorMessage)
PixivException.PixivException: 1002 This user account has been suspended., hasDumpPage=Y
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "PixivUtil2.py", line 2404, in main
np_is_valid, op_is_valid, selection = main_loop(ewd, op_is_valid, selection, np_is_valid, args)
File "PixivUtil2.py", line 2114, in main_loop
menu_download_by_member_id(op_is_valid, args)
File "PixivUtil2.py", line 1528, in menu_download_by_member_id
process_member(member_id, page=page, end_page=end_page, title_prefix=prefix)
File "PixivUtil2.py", line 454, in process_member
PixivHelper.dump_html("Dump for " + str(member_id) + " Error Code " + str(ex.errorCode) + ".html", list_page)
File "/mnt/c/PixivUtil2/PixivHelper.py", line 422, in dump_html
dump = open(filename, 'wb', encoding="utf-8")
ValueError: binary mode doesn't take an encoding argument
press enter to exit.
|
ValueError
|
def WriteInfo(self, filename):
info = None
try:
# Issue #421 ensure subdir exists.
PixivHelper.makeSubdirs(filename)
info = codecs.open(filename, "wb", encoding="utf-8")
except IOError:
info = codecs.open(str(self.imageId) + ".txt", "wb", encoding="utf-8")
PixivHelper.GetLogger().exception(
"Error when saving image info: %s, file is saved to: %s.txt",
filename,
self.imageId,
)
info.write("ArtistID = {0}\r\n".format(self.parent.artistId))
info.write("ArtistName = {0}\r\n".format(self.parent.artistName))
info.write("ImageID = {0}\r\n".format(self.imageId))
info.write("Title = {0}\r\n".format(self.imageTitle))
info.write("Caption = {0}\r\n".format(self.body_text))
# info.write(u"Tags = " + ", ".join(self.imageTags) + "\r\n")
if self.is_restricted:
info.write("Image Mode = {0}, Restricted\r\n".format(self.type))
else:
info.write("Image Mode = {0}\r\n".format(self.type))
info.write("Pages = {0}\r\n".format(self.imageCount))
info.write("Date = {0}\r\n".format(self.worksDate))
# info.write(u"Resolution = " + self.worksResolution + "\r\n")
# info.write(u"Tools = " + self.worksTools + "\r\n")
info.write("Like Count = {0}\r\n".format(self.likeCount))
info.write(
"Link = https://www.pixiv.net/fanbox/creator/{0}/post/{1}\r\n".format(
self.parent.artistId, self.imageId
)
)
# info.write("Ugoira Data = " + str(self.ugoira_data) + "\r\n")
if len(self.embeddedFiles) > 0:
info.write("Urls =\r\n")
for link in self.embeddedFiles:
info.write(" - {0}\r\n".format(link))
info.close()
|
def WriteInfo(self, filename):
info = None
try:
# Issue #421 ensure subdir exists.
PixivHelper.makeSubdirs(filename)
info = codecs.open(filename, "wb", encoding="utf-8")
except IOError:
info = codecs.open(str(self.imageId) + ".txt", "wb", encoding="utf-8")
PixivHelper.GetLogger().exception(
"Error when saving image info: {0}, file is saved to: {1}.txt".format(
filename, self.imageId
)
)
info.write("ArtistID = {0}\r\n".format(self.parent.artistId))
info.write("ArtistName = {0}\r\n".format(self.parent.artistName))
info.write("ImageID = {0}\r\n".format(self.imageId))
info.write("Title = {0}\r\n".format(self.imageTitle))
info.write("Caption = {0}\r\n".format(self.body_text))
# info.write(u"Tags = " + ", ".join(self.imageTags) + "\r\n")
if self.is_restricted:
info.write("Image Mode = {0}, Restricted\r\n".format(self.type))
else:
info.write("Image Mode = {0}\r\n".format(self.type))
info.write("Pages = {0}\r\n".format(self.imageCount))
info.write("Date = {0}\r\n".format(self.worksDate))
# info.write(u"Resolution = " + self.worksResolution + "\r\n")
# info.write(u"Tools = " + self.worksTools + "\r\n")
info.write("Like Count = {0}\r\n".format(self.likeCount))
info.write(
"Link = https://www.pixiv.net/fanbox/creator/{0}/post/{1}\r\n".format(
self.parent.artistId, self.imageId
)
)
# info.write("Ugoira Data = " + str(self.ugoira_data) + "\r\n")
if len(self.embeddedFiles) > 0:
info.write("Urls =\r\n")
for link in self.embeddedFiles:
info.write(" - {0}\r\n".format(link))
info.close()
|
https://github.com/Nandaka/PixivUtil2/issues/540
|
Traceback (most recent call last):
File "PixivUtil2.py", line 270, in download_image
File "zipfile.pyc", line 901, in testzip
File "zipfile.pyc", line 993, in open
RuntimeError: File cover.jpg is encrypted, password required for extraction
Error at download_image(): (<type 'exceptions.RuntimeError'>, RuntimeError('File cover.jpg is encrypted, password required for extraction',), <traceback object at 0x042BE030>) at [REDACTED link for privacy reasons] (9000)
Traceback (most recent call last):
File "PixivUtil2.py", line 2369, in main
File "PixivUtil2.py", line 2126, in main_loop
File "PixivUtil2.py", line 1876, in menu_fanbox_download_supported_artist
File "PixivUtil2.py", line 1925, in processFanboxArtist
File "PixivUtil2.py", line 1978, in processFanboxImages
File "PixivUtil2.py", line 270, in download_image
File "zipfile.pyc", line 901, in testzip
File "zipfile.pyc", line 993, in open
RuntimeError: File cover.jpg is encrypted, password required for extraction
press enter to exit.
|
RuntimeError
|
def follow_user(browser, follow_restrict, login, user_name):
"""Follows the user of the currently opened image"""
try:
follow_button = browser.find_element_by_xpath("//*[contains(text(), 'Follow')]")
sleep(2) # Do we still need this sleep?
if follow_button.is_displayed():
follow_button.send_keys("\n")
else:
driver.execute_script(
"arguments[0].style.visibility = 'visible'; arguments[0].style.height = '10px'; arguments[0].style.width = '10px'; arguments[0].style.opacity = 1",
follow_button,
)
follow_button.click()
print("--> Now following")
log_followed_pool(login, user_name)
follow_restrict[user_name] = follow_restrict.get(user_name, 0) + 1
sleep(3)
return 1
except NoSuchElementException:
print("--> Already following")
sleep(1)
return 0
|
def follow_user(browser, follow_restrict, login, user_name):
"""Follows the user of the currently opened image"""
try:
follow_button = browser.find_element_by_xpath("//*[contains(text(), 'Follow')]")
sleep(2) # Do we still need this sleep?
follow_button.send_keys("\n")
print("--> Now following")
log_followed_pool(login, user_name)
follow_restrict[user_name] = follow_restrict.get(user_name, 0) + 1
sleep(3)
return 1
except NoSuchElementException:
print("--> Already following")
sleep(1)
return 0
|
https://github.com/timgrossmann/InstaPy/issues/615
|
[7/33]
Image from: b'bruhmusicals'
Number of Followers: 399157
Link: b'https://www.instagram.com/p/BYdcFanFczK/?tagged=followme'
Description: b'Comment "prank" letter by letter\xf0\x9f\x98\x82 // Follow @bruhmusicals if you\'re viewing\xf0\x9f\x94\xa5'
--> Image Liked!
--> Commented: b'Super!'
Traceback (most recent call last):
File "quickstart.py", line 15, in <module>
.set_comments(['Super!','Nice!','Cool'])\
File "/home/tm/Desktop/Projets Info/Instagram/InstaPy/instapy/instapy.py", line 531, in like_by_tags
followed += follow_user(self.browser, self.follow_restrict, self.username, user_name)
File "/home/tm/Desktop/Projets Info/Instagram/InstaPy/instapy/unfollow_util.py", line 134, in follow_user
follow_button.send_keys("\n")
File "/usr/lib/python3.6/site-packages/selenium/webdriver/remote/webelement.py", line 350, in send_keys
'value': keys_to_typing(value)})
File "/usr/lib/python3.6/site-packages/selenium/webdriver/remote/webelement.py", line 499, in _execute
return self._parent.execute(command, params)
File "/usr/lib/python3.6/site-packages/selenium/webdriver/remote/webdriver.py", line 297, in execute
self.error_handler.check_response(response)
File "/usr/lib/python3.6/site-packages/selenium/webdriver/remote/errorhandler.py", line 194, in check_response
raise exception_class(message, screen, stacktrace)
selenium.common.exceptions.ElementNotVisibleException: Message: element not visible
(Session info: chrome=60.0.3112.101)
(Driver info: chromedriver=2.29.461571 (8a88bbe0775e2a23afda0ceaf2ef7ee74e822cc5),platform=Linux 4.12.8-2-ARCH x86_64)
|
selenium.common.exceptions.ElementNotVisibleException
|
def interact_user_followers(self, usernames, amount=10, random=False):
userToInteract = []
if not isinstance(usernames, list):
usernames = [usernames]
try:
for user in usernames:
user = get_given_user_followers(
self.browser,
user,
amount,
self.dont_include,
self.username,
self.follow_restrict,
random,
)
if type(user) is list:
userToInteract += user
except (TypeError, RuntimeWarning) as err:
if type(err) == RuntimeWarning:
print("Warning: {} , stopping follow_users".format(err))
self.logFile.write("Warning: {} , stopping follow_users\n".format(err))
return self
else:
print("Sorry, an error occured: {}".format(err))
self.logFile.write("Sorry, an error occured: {}\n".format(err))
self.aborting = True
return self
print("--> Users: {} \n".format(len(userToInteract)))
userToInteract = sample(
userToInteract,
int(ceil(self.user_interact_percentage * len(userToInteract) / 100)),
)
self.like_by_users(
userToInteract,
self.user_interact_amount,
self.user_interact_random,
self.user_interact_media,
)
return self
|
def interact_user_followers(self, usernames, amount=10, random=False):
userToInteract = []
if not isinstance(usernames, list):
usernames = [usernames]
try:
for user in usernames:
userToInteract += get_given_user_followers(
self.browser,
user,
amount,
self.dont_include,
self.username,
self.follow_restrict,
random,
)
except (TypeError, RuntimeWarning) as err:
if type(err) == RuntimeWarning:
print("Warning: {} , stopping follow_users".format(err))
self.logFile.write("Warning: {} , stopping follow_users\n".format(err))
return self
else:
print("Sorry, an error occured: {}".format(err))
self.logFile.write("Sorry, an error occured: {}\n".format(err))
self.aborting = True
return self
print("--> Users: {}".format(len(userToInteract)))
print("")
userToInteract = sample(
userToInteract,
int(ceil(self.user_interact_percentage * len(userToInteract) / 100)),
)
self.like_by_users(
userToInteract,
self.user_interact_amount,
self.user_interact_random,
self.user_interact_media,
)
return self
|
https://github.com/timgrossmann/InstaPy/issues/751
|
Logged in successfully!
Traceback (most recent call last):
File "test4_instapy.py", line 24, in <module>
session.interact_user_followers(['User1'], amount=12, random=True)
File "/virt_uni/InstaPy/instapy/instapy.py", line 849, in interact_user_followers
userToInteract += get_given_user_followers(self.browser, user, amount, self.dont_include, self.username, self.follow_restrict, random)
File "/virt_uni/InstaPy/instapy/unfollow_util.py", line 327, in get_given_user_followers
allfollowing = formatNumber(browser.find_element_by_xpath("//li[2]/a/span").text)
File "/usr/local/lib/python3.4/dist-packages/selenium/webdriver/remote/webdriver.py", line 293, in find_element_by_xpath
return self.find_element(by=By.XPATH, value=xpath)
File "/usr/local/lib/python3.4/dist-packages/selenium/webdriver/remote/webdriver.py", line 752, in find_element
'value': value})['value']
File "/usr/local/lib/python3.4/dist-packages/selenium/webdriver/remote/webdriver.py", line 236, in execute
self.error_handler.check_response(response)
File "/usr/local/lib/python3.4/dist-packages/selenium/webdriver/remote/errorhandler.py", line 192, in check_response
raise exception_class(message, screen, stacktrace)
selenium.common.exceptions.NoSuchElementException: Message: no such element: Unable to locate element: {"method":"xpath","selector":"//li[2]/a/span"}
(Session info: chrome=61.0.3163.100)
(Driver info: chromedriver=2.29.461571 (8a88bbe0775e2a23afda0ceaf2ef7ee74e822cc5),platform=Linux 3.16.0-4-amd64 x86_64)
|
selenium.common.exceptions.NoSuchElementException
|
def get_given_user_followers(
browser, user_name, amount, dont_include, login, follow_restrict, is_random
):
browser.get("https://www.instagram.com/" + user_name)
# check how many poeple are following this user.
# throw RuntimeWarning if we are 0 people following this user or
# if its a private account
try:
allfollowing = formatNumber(
browser.find_element_by_xpath("//li[2]/a/span").text
)
except NoSuchElementException:
print("Can't interact with private account")
return
following_link = browser.find_elements_by_xpath(
'//a[@href="/' + user_name + '/followers/"]'
)
following_link[0].send_keys("\n")
sleep(2)
# find dialog box
dialog = browser.find_element_by_xpath(
"//div[text()='Followers']/following-sibling::div"
)
# scroll down the page
scroll_bottom(browser, dialog, allfollowing)
# get follow buttons. This approch will find the follow buttons and
# ignore the Unfollow/Requested buttons.
follow_buttons = dialog.find_elements_by_xpath(
"//div/div/span/button[text()='Follow']"
)
person_list = []
if amount >= len(follow_buttons):
amount = len(follow_buttons)
print(user_name + " -> Less users to follow than requested.")
finalBtnPerson = []
if is_random:
sample = random.sample(range(0, len(follow_buttons)), amount)
for num in sample:
finalBtnPerson.append(follow_buttons[num])
else:
finalBtnPerson = follow_buttons[0:amount]
for person in finalBtnPerson:
if person and hasattr(person, "text") and person.text:
person_list.append(
person.find_element_by_xpath("../../../*")
.find_elements_by_tag_name("a")[1]
.text
)
return person_list
|
def get_given_user_followers(
browser, user_name, amount, dont_include, login, follow_restrict, is_random
):
browser.get("https://www.instagram.com/" + user_name)
# check how many poeple are following this user.
allfollowing = formatNumber(browser.find_element_by_xpath("//li[2]/a/span").text)
# throw RuntimeWarning if we are 0 people following this user
if allfollowing == 0:
raise RuntimeWarning("There are 0 people to follow")
try:
following_link = browser.find_elements_by_xpath(
'//a[@href="/' + user_name + '/followers/"]'
)
following_link[0].send_keys("\n")
except BaseException as e:
print("following_link error \n", str(e))
followNum = 0
sleep(2)
person_followed = []
# find dialog box
dialog = browser.find_element_by_xpath(
"//div[text()='Followers']/following-sibling::div"
)
# scroll down the page
scroll_bottom(browser, dialog, allfollowing)
# Get follow buttons. This approch will find the follow buttons and ignore the Unfollow/Requested buttons.
follow_buttons = dialog.find_elements_by_xpath(
"//div/div/span/button[text()='Follow']"
)
person_list = []
if amount >= len(follow_buttons):
amount = len(follow_buttons)
print(user_name + " -> Less users to follow than requested.")
finalBtnPerson = []
if is_random:
sample = random.sample(range(0, len(follow_buttons)), amount)
for num in sample:
finalBtnPerson.append(follow_buttons[num])
else:
finalBtnPerson = follow_buttons[0:amount]
for person in finalBtnPerson:
if person and hasattr(person, "text") and person.text:
person_list.append(
person.find_element_by_xpath("../../../*")
.find_elements_by_tag_name("a")[1]
.text
)
return person_list
|
https://github.com/timgrossmann/InstaPy/issues/751
|
Logged in successfully!
Traceback (most recent call last):
File "test4_instapy.py", line 24, in <module>
session.interact_user_followers(['User1'], amount=12, random=True)
File "/virt_uni/InstaPy/instapy/instapy.py", line 849, in interact_user_followers
userToInteract += get_given_user_followers(self.browser, user, amount, self.dont_include, self.username, self.follow_restrict, random)
File "/virt_uni/InstaPy/instapy/unfollow_util.py", line 327, in get_given_user_followers
allfollowing = formatNumber(browser.find_element_by_xpath("//li[2]/a/span").text)
File "/usr/local/lib/python3.4/dist-packages/selenium/webdriver/remote/webdriver.py", line 293, in find_element_by_xpath
return self.find_element(by=By.XPATH, value=xpath)
File "/usr/local/lib/python3.4/dist-packages/selenium/webdriver/remote/webdriver.py", line 752, in find_element
'value': value})['value']
File "/usr/local/lib/python3.4/dist-packages/selenium/webdriver/remote/webdriver.py", line 236, in execute
self.error_handler.check_response(response)
File "/usr/local/lib/python3.4/dist-packages/selenium/webdriver/remote/errorhandler.py", line 192, in check_response
raise exception_class(message, screen, stacktrace)
selenium.common.exceptions.NoSuchElementException: Message: no such element: Unable to locate element: {"method":"xpath","selector":"//li[2]/a/span"}
(Session info: chrome=61.0.3163.100)
(Driver info: chromedriver=2.29.461571 (8a88bbe0775e2a23afda0ceaf2ef7ee74e822cc5),platform=Linux 3.16.0-4-amd64 x86_64)
|
selenium.common.exceptions.NoSuchElementException
|
def follow_through_dialog(
browser,
user_name,
amount,
dont_include,
login,
follow_restrict,
allfollowing,
is_random,
delay,
callbacks=[],
):
followNum = 0
sleep(2)
person_followed = []
if is_random:
# expanding the popultaion for better sampling distribution
amount = amount * 3
# find dialog box
dialog = browser.find_element_by_xpath(
"//div[text()='Followers' or text()='Following']/following-sibling::div"
)
# scroll down the page
scroll_bottom(browser, dialog, allfollowing)
# Get follow buttons. This approch will find the follow buttons and ignore the Unfollow/Requested buttons.
follow_buttons = dialog.find_elements_by_xpath(
"//div/div/span/button[text()='Follow']"
)
person_list = []
abort = False
total_list = len(follow_buttons)
while (total_list < amount) and not abort:
amount_left = amount - total_list
before_scroll = total_list
scroll_bottom(browser, dialog, amount_left)
sleep(1)
follow_buttons = dialog.find_elements_by_xpath(
"//div/div/span/button[text()='Follow']"
)
total_list = len(follow_buttons)
abort = before_scroll == total_list
for person in follow_buttons:
if person and hasattr(person, "text") and person.text:
try:
person_list.append(
person.find_element_by_xpath("../../../*")
.find_elements_by_tag_name("a")[1]
.text
)
except IndexError:
pass # Element list is too short to have a [1] element
if amount >= total_list:
amount = total_list
print(user_name + " -> Less users to follow than requested.")
# follow loop
try:
hasSlept = False
btnPerson = list(zip(follow_buttons, person_list))
if is_random:
sample = random.sample(range(0, len(follow_buttons)), amount)
finalBtnPerson = []
for num in sample:
finalBtnPerson.append(btnPerson[num])
else:
finalBtnPerson = btnPerson
for button, person in finalBtnPerson:
if followNum >= amount:
print("--> Total followNum reached: ", followNum)
break
if followNum != 0 and hasSlept == False and followNum % 10 == 0:
if delay < 60:
print("sleeping for about {} seconds".format(delay))
else:
print("sleeping for about {} minutes".format(delay / 60))
sleep(delay)
hasSlept = True
continue
if person not in dont_include:
followNum += 1
# Register this session's followed user for further interaction
person_followed.append(person)
button.send_keys("\n")
log_followed_pool(login, person)
follow_restrict[user_name] = follow_restrict.get(user_name, 0) + 1
print(
"--> Ongoing follow "
+ str(followNum)
+ ", now following: {}".format(person.encode("utf-8"))
)
for callback in callbacks:
callback(person.encode("utf-8"))
sleep(15)
# To only sleep once until there is the next follow
if hasSlept:
hasSlept = False
continue
else:
if is_random:
repickedNum = -1
while repickedNum not in sample and repickedNum != -1:
repickedNum = random.randint(0, len(btnPerson))
sample.append(repickedNum)
finalBtnPerson.append(btnPerson[repickedNum])
continue
except BaseException as e:
print("follow loop error \n", str(e))
return person_followed
|
def follow_through_dialog(
browser,
user_name,
amount,
dont_include,
login,
follow_restrict,
allfollowing,
is_random,
delay,
callbacks=[],
):
followNum = 0
sleep(2)
person_followed = []
if is_random:
# expanding the popultaion for better sampling distribution
amount = amount * 3
# find dialog box
dialog = browser.find_element_by_xpath(
"//div[text()='Followers' or text()='Following']/following-sibling::div"
)
# scroll down the page
scroll_bottom(browser, dialog, allfollowing)
# Get follow buttons. This approch will find the follow buttons and ignore the Unfollow/Requested buttons.
follow_buttons = dialog.find_elements_by_xpath(
"//div/div/span/button[text()='Follow']"
)
person_list = []
abort = False
total_list = len(follow_buttons)
while (total_list < amount) and not abort:
amount_left = amount - total_list
before_scroll = total_list
scroll_bottom(browser, dialog, amount_left)
sleep(1)
follow_buttons = dialog.find_elements_by_xpath(
"//div/div/span/button[text()='Follow']"
)
total_list = len(follow_buttons)
abort = before_scroll == total_list
for person in follow_buttons:
if person and hasattr(person, "text") and person.text:
person_list.append(
person.find_element_by_xpath("../../../*")
.find_elements_by_tag_name("a")[1]
.text
)
if amount >= total_list:
amount = total_list
print(user_name + " -> Less users to follow than requested.")
# follow loop
try:
hasSlept = False
btnPerson = list(zip(follow_buttons, person_list))
if is_random:
sample = random.sample(range(0, len(follow_buttons)), amount)
finalBtnPerson = []
for num in sample:
finalBtnPerson.append(btnPerson[num])
else:
finalBtnPerson = btnPerson
for button, person in finalBtnPerson:
if followNum >= amount:
print("--> Total followNum reached: ", followNum)
break
if followNum != 0 and hasSlept == False and followNum % 10 == 0:
if delay < 60:
print("sleeping for about {} seconds".format(delay))
else:
print("sleeping for about {} minutes".format(delay / 60))
sleep(delay)
hasSlept = True
continue
if person not in dont_include:
followNum += 1
# Register this session's followed user for further interaction
person_followed.append(person)
button.send_keys("\n")
log_followed_pool(login, person)
follow_restrict[user_name] = follow_restrict.get(user_name, 0) + 1
print(
"--> Ongoing follow "
+ str(followNum)
+ ", now following: {}".format(person.encode("utf-8"))
)
for callback in callbacks:
callback(person.encode("utf-8"))
sleep(15)
# To only sleep once until there is the next follow
if hasSlept:
hasSlept = False
continue
else:
if is_random:
repickedNum = -1
while repickedNum not in sample and repickedNum != -1:
repickedNum = random.randint(0, len(btnPerson))
sample.append(repickedNum)
finalBtnPerson.append(btnPerson[repickedNum])
continue
except BaseException as e:
print("follow loop error \n", str(e))
return person_followed
|
https://github.com/timgrossmann/InstaPy/issues/671
|
Traceback (most recent call last):
File "example.py", line 18, in <module>
session.follow_user_followers(['timanderic', 'SuperDeluxe'], amount=10, random=False, sleep_delay=0)
File "/Users/Travis/Documents/GitHub/InstaPy/instapy/instapy.py", line 902, in follow_user_followers
userFollowed += follow_given_user_followers(self.browser, user, amount, self.dont_include, self.username, self.follow_restrict, random, sleep_delay)
File "/Users/Travis/Documents/GitHub/InstaPy/instapy/unfollow_util.py", line 435, in follow_given_user_followers
personFollowed = follow_through_dialog(browser, user_name, amount, dont_include, login, follow_restrict, allfollowing, random, delay, callbacks=[])
File "/Users/Travis/Documents/GitHub/InstaPy/instapy/unfollow_util.py", line 255, in follow_through_dialog
.find_elements_by_tag_name("a")[1].text)
IndexError: list index out of range
|
IndexError
|
def login_user(browser, username, password, switch_language=True):
"""Logins the user with the given username and password"""
browser.get("https://www.instagram.com")
# Changes instagram language to english, to ensure no errors ensue from having the site on a different language
# Might cause problems if the OS language is english
if switch_language:
browser.find_element_by_xpath(
"//footer[@class='_s5vm9']/div[@class='_g7lf5 _9z659']/nav["
"@class='_luodr']/ul[@class='_g8wl6']/li[@class='_538w0'][10]/"
"span[@class='_pqycz _hqmnd']/select[@class='_fsoey']/option"
"[text()='English']"
).click()
# Check if the first div is 'Create an Account' or 'Log In'
login_elem = browser.find_element_by_xpath("//article/div/div/p/a[text()='Log in']")
if login_elem is not None:
action = ActionChains(browser).move_to_element(login_elem).click().perform()
# Enter username and password and logs the user in
# Sometimes the element name isn't 'Username' and 'Password' (valid for placeholder too)
input_username = browser.find_elements_by_xpath("//input[@name='username']")
action = (
ActionChains(browser)
.move_to_element(input_username[0])
.click()
.send_keys(username)
.perform()
)
sleep(1)
input_password = browser.find_elements_by_xpath("//input[@name='password']")
ActionChains(browser).move_to_element(input_password[0]).click().send_keys(
password
).perform()
login_button = browser.find_element_by_xpath("//form/span/button[text()='Log in']")
action = ActionChains(browser).move_to_element(login_button).click().perform()
sleep(5)
# Check if user is logged-in (If there's two 'nav' elements)
nav = browser.find_elements_by_xpath("//nav")
if len(nav) == 2:
return True
else:
return False
|
def login_user(browser, username, password, switch_language=True):
"""Logins the user with the given username and password"""
browser.get("https://www.instagram.com")
# Changes instagram language to english, to ensure no errors ensue from having the site on a different language
# Might cause problems if the OS language is english
if switch_language:
browser.find_element_by_xpath(
"//footer[@class='_oofbn']/div[@class='_mhrsk _pcuq6']/nav["
"@class='_p1gbi']/ul[@class='_fh0f2']/li[@class='_fw3ds'][10]/"
"span[@class='_17z9g _c4mil']/select[@class='_nif11']/option"
"[text()='English']"
).click()
# Check if the first div is 'Create an Account' or 'Log In'
login_elem = browser.find_element_by_xpath("//article/div/div/p/a[text()='Log in']")
if login_elem is not None:
action = ActionChains(browser).move_to_element(login_elem).click().perform()
# Enter username and password and logs the user in
# Sometimes the element name isn't 'Username' and 'Password' (valid for placeholder too)
input_username = browser.find_elements_by_xpath("//input[@name='username']")
action = (
ActionChains(browser)
.move_to_element(input_username[0])
.click()
.send_keys(username)
.perform()
)
sleep(1)
input_password = browser.find_elements_by_xpath("//input[@name='password']")
ActionChains(browser).move_to_element(input_password[0]).click().send_keys(
password
).perform()
login_button = browser.find_element_by_xpath("//form/span/button[text()='Log in']")
action = ActionChains(browser).move_to_element(login_button).click().perform()
sleep(5)
# Check if user is logged-in (If there's two 'nav' elements)
nav = browser.find_elements_by_xpath("//nav")
if len(nav) == 2:
return True
else:
return False
|
https://github.com/timgrossmann/InstaPy/issues/498
|
Traceback (most recent call last):
File "insta_script.py", line 495, in <module>
session.login()
File "/home/pi/Projects/InstaPy/instapy/instapy.py", line 146, in login
if not login_user(self.browser, self.username, self.password, self.switch_language):
File "/home/pi/Projects/InstaPy/instapy/login_util.py", line 13, in login_user
browser.find_element_by_xpath("//footer[@class='_oofbn']/div[@class='_mhrsk _pcuq6']/nav["
File "/usr/local/lib/python2.7/dist-packages/selenium/webdriver/remote/webdriver.py", line 293, in find_element_by_xpath
return self.find_element(by=By.XPATH, value=xpath)
File "/usr/local/lib/python2.7/dist-packages/selenium/webdriver/remote/webdriver.py", line 752, in find_element
'value': value})['value']
File "/usr/local/lib/python2.7/dist-packages/selenium/webdriver/remote/webdriver.py", line 236, in execute
self.error_handler.check_response(response)
File "/usr/local/lib/python2.7/dist-packages/selenium/webdriver/remote/errorhandler.py", line 192, in check_response
raise exception_class(message, screen, stacktrace)
selenium.common.exceptions.NoSuchElementException: Message: Unable to locate element: {"method":"xpath","selector":"//footer[@class='_oofbn']/div[@class='_mhrsk _pcuq6']/nav[@class='_p1gbi']/ul[@class='_fh0f2']/li[@class='_fw3ds'][10]/span[@class='_17z9g _c4mil']/select[@class='_nif11']/option[text()='English']"}
Stacktrace:
at FirefoxDriver.prototype.findElementInternal_ (file:///tmp/tmpKCdbVi/extensions/fxdriver@googlecode.com/components/driver-component.js:10770)
at fxdriver.Timer.prototype.setTimeout/<.notify (file:///tmp/tmpKCdbVi/extensions/fxdriver@googlecode.com/components/driver-component.js:625)
|
selenium.common.exceptions.NoSuchElementException
|
def __init__(self, username=None, password=None, nogui=False):
if nogui:
self.display = Display(visible=0, size=(800, 600))
self.display.start()
chromedriver_location = "./assets/chromedriver"
chrome_options = Options()
chrome_options.add_argument("--dns-prefetch-disable")
chrome_options.add_argument("--no-sandbox")
chrome_options.add_argument("--lang=en-US")
chrome_options.add_experimental_option("prefs", {"intl.accept_languages": "en-US"})
chrome_options.binary_location = chromedriver_location
self.browser = webdriver.Chrome(
chromedriver_location, chrome_options=chrome_options
)
self.browser.implicitly_wait(25)
self.logFile = open("./logs/logFile.txt", "a")
self.logFile.write(
"Session started - %s\n" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
)
self.username = username or environ.get("INSTA_USER")
self.password = password or environ.get("INSTA_PW")
self.nogui = nogui
self.do_comment = False
self.comment_percentage = 0
self.comments = ["Cool!", "Nice!", "Looks good!"]
self.photo_comments = []
self.video_comments = []
self.followed = 0
self.follow_restrict = load_follow_restriction()
self.follow_times = 1
self.do_follow = False
self.follow_percentage = 0
self.dont_include = []
self.automatedFollowedPool = []
self.dont_like = ["sex", "nsfw"]
self.ignore_if_contains = []
self.ignore_users = []
self.use_clarifai = False
self.clarifai_secret = None
self.clarifai_id = None
self.clarifai_img_tags = []
self.clarifai_full_match = False
self.like_by_followers_upper_limit = 0
self.like_by_followers_lower_limit = 0
self.aborting = False
|
def __init__(self, username=None, password=None, nogui=False):
if nogui:
self.display = Display(visible=0, size=(800, 600))
self.display.start()
chrome_options = Options()
chrome_options.add_argument("--dns-prefetch-disable")
chrome_options.add_argument("--no-sandbox")
chrome_options.add_argument("--lang=en-US")
chrome_options.add_experimental_option("prefs", {"intl.accept_languages": "en-US"})
self.browser = webdriver.Chrome(
"./assets/chromedriver", chrome_options=chrome_options
)
self.browser.implicitly_wait(25)
self.logFile = open("./logs/logFile.txt", "a")
self.logFile.write(
"Session started - %s\n" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
)
self.username = username or environ.get("INSTA_USER")
self.password = password or environ.get("INSTA_PW")
self.nogui = nogui
self.do_comment = False
self.comment_percentage = 0
self.comments = ["Cool!", "Nice!", "Looks good!"]
self.photo_comments = []
self.video_comments = []
self.followed = 0
self.follow_restrict = load_follow_restriction()
self.follow_times = 1
self.do_follow = False
self.follow_percentage = 0
self.dont_include = []
self.automatedFollowedPool = []
self.dont_like = ["sex", "nsfw"]
self.ignore_if_contains = []
self.ignore_users = []
self.use_clarifai = False
self.clarifai_secret = None
self.clarifai_id = None
self.clarifai_img_tags = []
self.clarifai_full_match = False
self.like_by_followers_upper_limit = 0
self.like_by_followers_lower_limit = 0
self.aborting = False
|
https://github.com/timgrossmann/InstaPy/issues/338
|
Traceback (most recent call last):
File "LikeSCript.py", line 16, in <module>
.like_by_tags([<My Tags>], amount=300, media='Photo')\
File "/root/InstaPy/instapy/instapy.py", line 386, in like_by_tags
liked = like_image(self.browser)
File "/root/InstaPy/instapy/like_util.py", line 279, in like_image
browser.execute_script("document.getElementsByClassName('" + like_elem[0].get_attribute("class") + "')[0].click()")
File "/usr/local/lib/python3.5/dist-packages/selenium/webdriver/remote/webelement.py", line 111, in get_attribute
resp = self._execute(Command.GET_ELEMENT_ATTRIBUTE, {'name': name})
File "/usr/local/lib/python3.5/dist-packages/selenium/webdriver/remote/webelement.py", line 461, in _execute
return self._parent.execute(command, params)
File "/usr/local/lib/python3.5/dist-packages/selenium/webdriver/remote/webdriver.py", line 236, in execute
self.error_handler.check_response(response)
File "/usr/local/lib/python3.5/dist-packages/selenium/webdriver/remote/errorhandler.py", line 192, in check_response
raise exception_class(message, screen, stacktrace)
selenium.common.exceptions.WebDriverException: Message: chrome not reachable
(Session info: chrome=59.0.3071.109)
(Driver info: chromedriver=2.29.461571 (8a88bbe0775e2a23afda0ceaf2ef7ee74e822cc5),platform=Linux 4.4.0-81-generic x86_64)
|
selenium.common.exceptions.WebDriverException
|
def __init__(self, username=None, password=None, nogui=False):
if nogui:
self.display = Display(visible=0, size=(800, 600))
self.display.start()
chromedriver_location = "./assets/chromedriver"
chrome_options = Options()
chrome_options.add_argument("--dns-prefetch-disable")
chrome_options.add_argument("--no-sandbox")
chrome_options.add_argument("--lang=en-US")
chrome_options.add_experimental_option("prefs", {"intl.accept_languages": "en-US"})
chrome_options.binary_location = chromedriver_location
self.browser = webdriver.Chrome(
chromedriver_location, chrome_options=chrome_options
)
self.browser.implicitly_wait(25)
self.logFile = open("./logs/logFile.txt", "a")
self.logFile.write(
"Session started - %s\n" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
)
self.username = username or environ.get("INSTA_USER")
self.password = password or environ.get("INSTA_PW")
self.nogui = nogui
self.do_comment = False
self.comment_percentage = 0
self.comments = ["Cool!", "Nice!", "Looks good!"]
self.photo_comments = []
self.video_comments = []
self.followed = 0
self.follow_restrict = load_follow_restriction()
self.follow_times = 1
self.do_follow = False
self.follow_percentage = 0
self.dont_include = []
self.automatedFollowedPool = []
self.dont_like = ["sex", "nsfw"]
self.ignore_if_contains = []
self.ignore_users = []
self.use_clarifai = False
self.clarifai_secret = None
self.clarifai_id = None
self.clarifai_img_tags = []
self.clarifai_full_match = False
self.like_by_followers_upper_limit = 0
self.like_by_followers_lower_limit = 0
self.aborting = False
|
def __init__(self, username=None, password=None, nogui=False):
if nogui:
self.display = Display(visible=0, size=(800, 600))
self.display.start()
chrome_options = Options()
chrome_options.add_argument("--dns-prefetch-disable")
chrome_options.add_argument("--no-sandbox")
chrome_options.add_argument("--lang=en-US")
chrome_options.add_experimental_option("prefs", {"intl.accept_languages": "en-US"})
self.browser = webdriver.Chrome(
"./assets/chromedriver", chrome_options=chrome_options
)
self.browser.implicitly_wait(25)
self.logFile = open("./logs/logFile.txt", "a")
self.logFile.write(
"Session started - %s\n" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
)
self.username = username or environ.get("INSTA_USER")
self.password = password or environ.get("INSTA_PW")
self.nogui = nogui
self.do_comment = False
self.comment_percentage = 0
self.comments = ["Cool!", "Nice!", "Looks good!"]
self.photo_comments = []
self.video_comments = []
self.followed = 0
self.follow_restrict = load_follow_restriction()
self.follow_times = 1
self.do_follow = False
self.follow_percentage = 0
self.dont_include = []
self.automatedFollowedPool = []
self.dont_like = ["sex", "nsfw"]
self.ignore_if_contains = []
self.ignore_users = []
self.use_clarifai = False
self.clarifai_secret = None
self.clarifai_id = None
self.clarifai_img_tags = []
self.clarifai_full_match = False
self.like_by_followers_upper_limit = 0
self.like_by_followers_lower_limit = 0
self.aborting = False
|
https://github.com/timgrossmann/InstaPy/issues/338
|
Traceback (most recent call last):
File "LikeSCript.py", line 16, in <module>
.like_by_tags([<My Tags>], amount=300, media='Photo')\
File "/root/InstaPy/instapy/instapy.py", line 386, in like_by_tags
liked = like_image(self.browser)
File "/root/InstaPy/instapy/like_util.py", line 279, in like_image
browser.execute_script("document.getElementsByClassName('" + like_elem[0].get_attribute("class") + "')[0].click()")
File "/usr/local/lib/python3.5/dist-packages/selenium/webdriver/remote/webelement.py", line 111, in get_attribute
resp = self._execute(Command.GET_ELEMENT_ATTRIBUTE, {'name': name})
File "/usr/local/lib/python3.5/dist-packages/selenium/webdriver/remote/webelement.py", line 461, in _execute
return self._parent.execute(command, params)
File "/usr/local/lib/python3.5/dist-packages/selenium/webdriver/remote/webdriver.py", line 236, in execute
self.error_handler.check_response(response)
File "/usr/local/lib/python3.5/dist-packages/selenium/webdriver/remote/errorhandler.py", line 192, in check_response
raise exception_class(message, screen, stacktrace)
selenium.common.exceptions.WebDriverException: Message: chrome not reachable
(Session info: chrome=59.0.3071.109)
(Driver info: chromedriver=2.29.461571 (8a88bbe0775e2a23afda0ceaf2ef7ee74e822cc5),platform=Linux 4.4.0-81-generic x86_64)
|
selenium.common.exceptions.WebDriverException
|
def _get_unitary_matrix(self, unitary):
"""Return the matrix representing a unitary operation.
Args:
unitary (~.Operation): a PennyLane unitary operation
Returns:
array[complex]: Returns a 2D matrix representation of
the unitary in the computational basis, or, in the case of a diagonal unitary,
a 1D array representing the matrix diagonal.
"""
op_name = unitary.name.split(".inv")[0]
if op_name in self.parametric_ops:
if op_name == "MultiRZ":
mat = self.parametric_ops[op_name](*unitary.parameters, len(unitary.wires))
else:
mat = self.parametric_ops[op_name](*unitary.parameters)
if unitary.inverse:
mat = self._transpose(self._conj(mat))
return mat
if isinstance(unitary, DiagonalOperation):
return unitary.eigvals
return unitary.matrix
|
def _get_unitary_matrix(self, unitary):
"""Return the matrix representing a unitary operation.
Args:
unitary (~.Operation): a PennyLane unitary operation
Returns:
array[complex]: Returns a 2D matrix representation of
the unitary in the computational basis, or, in the case of a diagonal unitary,
a 1D array representing the matrix diagonal.
"""
op_name = unitary.name
if op_name in self.parametric_ops:
if op_name == "MultiRZ":
return self.parametric_ops[unitary.name](
*unitary.parameters, len(unitary.wires)
)
return self.parametric_ops[unitary.name](*unitary.parameters)
if isinstance(unitary, DiagonalOperation):
return unitary.eigvals
return unitary.matrix
|
https://github.com/PennyLaneAI/pennylane/issues/1071
|
import pennylane as qml
dev = qml.device('default.qubit', wires=2)
@qml.qnode(dev)
... def circuit(x):
... qml.RZ(x, wires=1).inv()
... return qml.expval(qml.PauliZ(0))
...
circuit(0)
tensor(1., requires_grad=True)
opt = qml.GradientDescentOptimizer(0.1)
x0 = 0.5
opt.step(circuit, x0)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/optimize/gradient_descent.py", line 98, in step
g, _ = self.compute_grad(objective_fn, args, kwargs, grad_fn=grad_fn)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/optimize/gradient_descent.py", line 127, in compute_grad
grad = g(*args, **kwargs)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/_grad.py", line 96, in __call__
grad_value, ans = self._get_grad_fn(args)(*args, **kwargs)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/autograd/wrap_util.py", line 20, in nary_f
return unary_operator(unary_f, x, *nary_op_args, **nary_op_kwargs)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/_grad.py", line 113, in _grad_with_forward
vjp, ans = _make_vjp(fun, x)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/autograd/core.py", line 10, in make_vjp
end_value, end_node = trace(start_node, fun, x)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/autograd/tracer.py", line 10, in trace
end_box = fun(start_box)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/autograd/wrap_util.py", line 15, in unary_f
return fun(*subargs, **kwargs)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/tape/qnode.py", line 521, in __call__
res = self.qtape.execute(device=self.device)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/tape/tapes/tape.py", line 1070, in execute
return self._execute(params, device=device)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/tape/tapes/tape.py", line 1101, in execute_device
res = device.execute(self)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/_qubit_device.py", line 204, in execute
self.apply(circuit.operations, rotations=circuit.diagonalizing_gates, **kwargs)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/devices/default_qubit.py", line 188, in apply
self._state = self._apply_operation(self._state, operation)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/devices/default_qubit.py", line 213, in _apply_operation
matrix = self._get_unitary_matrix(operation)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/devices/default_qubit_autograd.py", line 167, in _get_unitary_matrix
return unitary.eigvals
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/operation.py", line 878, in eigvals
return super().eigvals
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/operation.py", line 753, in eigvals
op_eigvals = self._eigvals(*self.parameters)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/ops/qubit.py", line 678, in _eigvals
p = cmath.exp(-0.5j * theta)
TypeError: must be real number, not ArrayBox
|
TypeError
|
def _get_unitary_matrix(self, unitary):
"""Return the matrix representing a unitary operation.
Args:
unitary (~.Operation): a PennyLane unitary operation
Returns:
tf.Tensor[complex] or array[complex]: Returns a 2D matrix representation of
the unitary in the computational basis, or, in the case of a diagonal unitary,
a 1D array representing the matrix diagonal. For non-parametric unitaries,
the return type will be a ``np.ndarray``. For parametric unitaries, a ``tf.Tensor``
object will be returned.
"""
op_name = unitary.name.split(".inv")[0]
if op_name in self.parametric_ops:
if op_name == "MultiRZ":
mat = self.parametric_ops[op_name](*unitary.parameters, len(unitary.wires))
else:
mat = self.parametric_ops[op_name](*unitary.parameters)
if unitary.inverse:
mat = self._transpose(self._conj(mat))
return mat
if isinstance(unitary, DiagonalOperation):
return unitary.eigvals
return unitary.matrix
|
def _get_unitary_matrix(self, unitary):
"""Return the matrix representing a unitary operation.
Args:
unitary (~.Operation): a PennyLane unitary operation
Returns:
tf.Tensor[complex] or array[complex]: Returns a 2D matrix representation of
the unitary in the computational basis, or, in the case of a diagonal unitary,
a 1D array representing the matrix diagonal. For non-parametric unitaries,
the return type will be a ``np.ndarray``. For parametric unitaries, a ``tf.Tensor``
object will be returned.
"""
if unitary.name in self.parametric_ops:
if unitary.name == "MultiRZ":
return self.parametric_ops[unitary.name](
unitary.parameters, len(unitary.wires)
)
return self.parametric_ops[unitary.name](*unitary.parameters)
if isinstance(unitary, DiagonalOperation):
return unitary.eigvals
return unitary.matrix
|
https://github.com/PennyLaneAI/pennylane/issues/1071
|
import pennylane as qml
dev = qml.device('default.qubit', wires=2)
@qml.qnode(dev)
... def circuit(x):
... qml.RZ(x, wires=1).inv()
... return qml.expval(qml.PauliZ(0))
...
circuit(0)
tensor(1., requires_grad=True)
opt = qml.GradientDescentOptimizer(0.1)
x0 = 0.5
opt.step(circuit, x0)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/optimize/gradient_descent.py", line 98, in step
g, _ = self.compute_grad(objective_fn, args, kwargs, grad_fn=grad_fn)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/optimize/gradient_descent.py", line 127, in compute_grad
grad = g(*args, **kwargs)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/_grad.py", line 96, in __call__
grad_value, ans = self._get_grad_fn(args)(*args, **kwargs)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/autograd/wrap_util.py", line 20, in nary_f
return unary_operator(unary_f, x, *nary_op_args, **nary_op_kwargs)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/_grad.py", line 113, in _grad_with_forward
vjp, ans = _make_vjp(fun, x)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/autograd/core.py", line 10, in make_vjp
end_value, end_node = trace(start_node, fun, x)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/autograd/tracer.py", line 10, in trace
end_box = fun(start_box)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/autograd/wrap_util.py", line 15, in unary_f
return fun(*subargs, **kwargs)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/tape/qnode.py", line 521, in __call__
res = self.qtape.execute(device=self.device)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/tape/tapes/tape.py", line 1070, in execute
return self._execute(params, device=device)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/tape/tapes/tape.py", line 1101, in execute_device
res = device.execute(self)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/_qubit_device.py", line 204, in execute
self.apply(circuit.operations, rotations=circuit.diagonalizing_gates, **kwargs)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/devices/default_qubit.py", line 188, in apply
self._state = self._apply_operation(self._state, operation)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/devices/default_qubit.py", line 213, in _apply_operation
matrix = self._get_unitary_matrix(operation)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/devices/default_qubit_autograd.py", line 167, in _get_unitary_matrix
return unitary.eigvals
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/operation.py", line 878, in eigvals
return super().eigvals
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/operation.py", line 753, in eigvals
op_eigvals = self._eigvals(*self.parameters)
File "/home/nathan/anaconda3/lib/python3.7/site-packages/pennylane/ops/qubit.py", line 678, in _eigvals
p = cmath.exp(-0.5j * theta)
TypeError: must be real number, not ArrayBox
|
TypeError
|
def to_torch(qnode):
"""Function that accepts a :class:`~.QNode`, and returns a PyTorch-compatible QNode.
Args:
qnode (~pennylane.qnode.QNode): a PennyLane QNode
Returns:
torch.autograd.Function: the QNode as a PyTorch autograd function
"""
qnode_interface = getattr(qnode, "interface", None)
if qnode_interface == "torch":
return qnode
if qnode_interface is not None:
qnode = qnode._qnode # pylint: disable=protected-access
class _TorchQNode(torch.autograd.Function):
"""The TorchQNode"""
@staticmethod
def set_trainable(args):
"""Given input arguments to the TorchQNode, determine which arguments
are trainable and which aren't.
Currently, all arguments are assumed to be nondifferentiable by default,
unless the ``torch.tensor`` attribute ``requires_grad`` is set to True.
This method calls the underlying :meth:`set_trainable_args` method of the QNode.
"""
trainable_args = set()
for idx, arg in enumerate(args):
if getattr(arg, "requires_grad", False):
trainable_args.add(idx)
qnode.set_trainable_args(trainable_args)
@staticmethod
def forward(ctx, input_kwargs, *input_):
"""Implements the forward pass QNode evaluation"""
# detach all input tensors, convert to NumPy array
ctx.args = args_to_numpy(input_)
ctx.kwargs = kwargs_to_numpy(input_kwargs)
ctx.save_for_backward(*input_)
# Determine which QNode input tensors require gradients,
# and thus communicate to the QNode which ones must
# be wrapped as PennyLane variables.
_TorchQNode.set_trainable(input_)
# evaluate the QNode
res = qnode(*ctx.args, **ctx.kwargs)
if not isinstance(res, np.ndarray):
# scalar result, cast to NumPy scalar
res = np.array(res)
# if any input tensor uses the GPU, the output should as well
for i in input_:
if isinstance(i, torch.Tensor):
if i.is_cuda: # pragma: no cover
cuda_device = i.get_device()
return torch.as_tensor(
torch.from_numpy(res), device=cuda_device
)
return torch.from_numpy(res)
@staticmethod
@once_differentiable
def backward(ctx, grad_output): # pragma: no cover
"""Implements the backwards pass QNode vector-Jacobian product"""
# NOTE: This method is definitely tested by the `test_torch.py` test suite,
# however does not show up in the coverage. This is likely due to
# subtleties in the torch.autograd.FunctionMeta metaclass, specifically
# the way in which the backward class is created on the fly
# evaluate the Jacobian matrix of the QNode
jacobian = qnode.jacobian(ctx.args, ctx.kwargs)
jacobian = torch.as_tensor(jacobian, dtype=grad_output.dtype).to(
grad_output
)
vjp = torch.transpose(grad_output.view(-1, 1), 0, 1) @ jacobian
vjp = vjp.flatten()
# restore the nested structure of the input args
grad_input_list = unflatten_torch(vjp, ctx.saved_tensors)[0]
grad_input = []
# match the type and device of the input tensors
for i, j in zip(grad_input_list, ctx.saved_tensors):
res = torch.as_tensor(i, dtype=j.dtype)
if j.is_cuda: # pragma: no cover
cuda_device = j.get_device()
res = torch.as_tensor(res, device=cuda_device)
grad_input.append(res)
return (None,) + tuple(grad_input)
class TorchQNode(partial):
"""Torch QNode"""
# pylint: disable=too-few-public-methods
# Here, we are making use of functools.partial to dynamically add
# methods and attributes to the custom gradient method defined below.
# This allows us to provide more useful __str__ and __repr__ methods
# for the decorated function (so it would still look like a QNode to end-users),
# as well as making QNode attributes and methods available.
@property
def interface(self):
"""String representing the QNode interface"""
return "torch"
def __str__(self):
"""String representation"""
detail = "<QNode: device='{}', func={}, wires={}, interface={}>"
return detail.format(
qnode.device.short_name,
qnode.func.__name__,
qnode.num_wires,
self.interface,
)
def __repr__(self):
"""REPL representation"""
return self.__str__()
# Bind QNode methods
print_applied = qnode.print_applied
jacobian = qnode.jacobian
metric_tensor = qnode.metric_tensor
draw = qnode.draw
func = qnode.func
set_trainable_args = qnode.set_trainable_args
get_trainable_args = qnode.get_trainable_args
_qnode = qnode
# Bind QNode attributes. Note that attributes must be
# bound as properties; by making use of closure, we ensure
# that updates to the wrapped QNode attributes are reflected
# by the wrapper class.
arg_vars = property(lambda self: qnode.arg_vars)
num_variables = property(lambda self: qnode.num_variables)
par_to_grad_method = property(lambda self: qnode.par_to_grad_method)
@TorchQNode
def custom_apply(*args, **kwargs):
"""Custom apply wrapper, to allow passing kwargs to the TorchQNode"""
# get default kwargs that weren't passed
keyword_sig = _get_default_args(qnode.func)
keyword_defaults = {k: v[1] for k, v in keyword_sig.items()}
# keyword_positions = {v[0]: k for k, v in keyword_sig.items()}
# create a keyword_values dict, that contains defaults
# and any user-passed kwargs
keyword_values = {}
keyword_values.update(keyword_defaults)
keyword_values.update(kwargs)
# sort keyword values into a list of args, using their position
# [keyword_values[k] for k in sorted(keyword_positions, key=keyword_positions.get)]
return _TorchQNode.apply(keyword_values, *args)
return custom_apply
|
def to_torch(qnode):
"""Function that accepts a :class:`~.QNode`, and returns a PyTorch-compatible QNode.
Args:
qnode (~pennylane.qnode.QNode): a PennyLane QNode
Returns:
torch.autograd.Function: the QNode as a PyTorch autograd function
"""
qnode_interface = getattr(qnode, "interface", None)
if qnode_interface == "torch":
return qnode
if qnode_interface is not None:
qnode = qnode._qnode # pylint: disable=protected-access
class _TorchQNode(torch.autograd.Function):
"""The TorchQNode"""
@staticmethod
def set_trainable(args):
"""Given input arguments to the TorchQNode, determine which arguments
are trainable and which aren't.
Currently, all arguments are assumed to be nondifferentiable by default,
unless the ``torch.tensor`` attribute ``requires_grad`` is set to True.
This method calls the underlying :meth:`set_trainable_args` method of the QNode.
"""
trainable_args = set()
for idx, arg in enumerate(args):
if getattr(arg, "requires_grad", False):
trainable_args.add(idx)
qnode.set_trainable_args(trainable_args)
@staticmethod
def forward(ctx, input_kwargs, *input_):
"""Implements the forward pass QNode evaluation"""
# detach all input tensors, convert to NumPy array
ctx.args = args_to_numpy(input_)
ctx.kwargs = kwargs_to_numpy(input_kwargs)
ctx.save_for_backward(*input_)
# Determine which QNode input tensors require gradients,
# and thus communicate to the QNode which ones must
# be wrapped as PennyLane variables.
_TorchQNode.set_trainable(input_)
# evaluate the QNode
res = qnode(*ctx.args, **ctx.kwargs)
if not isinstance(res, np.ndarray):
# scalar result, cast to NumPy scalar
res = np.array(res)
# if any input tensor uses the GPU, the output should as well
for i in input_:
if isinstance(i, torch.Tensor):
if i.is_cuda: # pragma: no cover
cuda_device = i.get_device()
return torch.as_tensor(
torch.from_numpy(res), device=cuda_device
)
return torch.from_numpy(res)
@staticmethod
@once_differentiable
def backward(ctx, grad_output): # pragma: no cover
"""Implements the backwards pass QNode vector-Jacobian product"""
# NOTE: This method is definitely tested by the `test_torch.py` test suite,
# however does not show up in the coverage. This is likely due to
# subtleties in the torch.autograd.FunctionMeta metaclass, specifically
# the way in which the backward class is created on the fly
# evaluate the Jacobian matrix of the QNode
jacobian = qnode.jacobian(ctx.args, ctx.kwargs)
jacobian = torch.as_tensor(jacobian, dtype=grad_output.dtype)
vjp = torch.transpose(grad_output.view(-1, 1), 0, 1) @ jacobian
vjp = vjp.flatten()
# restore the nested structure of the input args
grad_input_list = unflatten_torch(vjp, ctx.saved_tensors)[0]
grad_input = []
# match the type and device of the input tensors
for i, j in zip(grad_input_list, ctx.saved_tensors):
res = torch.as_tensor(i, dtype=j.dtype)
if j.is_cuda: # pragma: no cover
cuda_device = j.get_device()
res = torch.as_tensor(res, device=cuda_device)
grad_input.append(res)
return (None,) + tuple(grad_input)
class TorchQNode(partial):
"""Torch QNode"""
# pylint: disable=too-few-public-methods
# Here, we are making use of functools.partial to dynamically add
# methods and attributes to the custom gradient method defined below.
# This allows us to provide more useful __str__ and __repr__ methods
# for the decorated function (so it would still look like a QNode to end-users),
# as well as making QNode attributes and methods available.
@property
def interface(self):
"""String representing the QNode interface"""
return "torch"
def __str__(self):
"""String representation"""
detail = "<QNode: device='{}', func={}, wires={}, interface={}>"
return detail.format(
qnode.device.short_name,
qnode.func.__name__,
qnode.num_wires,
self.interface,
)
def __repr__(self):
"""REPL representation"""
return self.__str__()
# Bind QNode methods
print_applied = qnode.print_applied
jacobian = qnode.jacobian
metric_tensor = qnode.metric_tensor
draw = qnode.draw
func = qnode.func
set_trainable_args = qnode.set_trainable_args
get_trainable_args = qnode.get_trainable_args
_qnode = qnode
# Bind QNode attributes. Note that attributes must be
# bound as properties; by making use of closure, we ensure
# that updates to the wrapped QNode attributes are reflected
# by the wrapper class.
arg_vars = property(lambda self: qnode.arg_vars)
num_variables = property(lambda self: qnode.num_variables)
par_to_grad_method = property(lambda self: qnode.par_to_grad_method)
@TorchQNode
def custom_apply(*args, **kwargs):
"""Custom apply wrapper, to allow passing kwargs to the TorchQNode"""
# get default kwargs that weren't passed
keyword_sig = _get_default_args(qnode.func)
keyword_defaults = {k: v[1] for k, v in keyword_sig.items()}
# keyword_positions = {v[0]: k for k, v in keyword_sig.items()}
# create a keyword_values dict, that contains defaults
# and any user-passed kwargs
keyword_values = {}
keyword_values.update(keyword_defaults)
keyword_values.update(kwargs)
# sort keyword values into a list of args, using their position
# [keyword_values[k] for k in sorted(keyword_positions, key=keyword_positions.get)]
return _TorchQNode.apply(keyword_values, *args)
return custom_apply
|
https://github.com/PennyLaneAI/pennylane/issues/709
|
Traceback (most recent call last):
File "qdqn.py", line 328, in <module>
loss.backward() #computes gradient of loss with respect to all weights n biases in the policy net
File "/home/ubuntu/anaconda3/envs/gymm/lib/python3.8/site-packages/torch/tensor.py", line 198, in backward
torch.autograd.backward(self, gradient, retain_graph, create_graph)
File "/home/ubuntu/anaconda3/envs/gymm/lib/python3.8/site-packages/torch/autograd/__init__.py", line 98, in backward
Variable._execution_engine.run_backward(
RuntimeError: Expected object of device type cuda but got device type cpu for argument #2 'mat2' in call to _th_mm
|
RuntimeError
|
def backward(ctx, grad_output): # pragma: no cover
"""Implements the backwards pass QNode vector-Jacobian product"""
# NOTE: This method is definitely tested by the `test_torch.py` test suite,
# however does not show up in the coverage. This is likely due to
# subtleties in the torch.autograd.FunctionMeta metaclass, specifically
# the way in which the backward class is created on the fly
# evaluate the Jacobian matrix of the QNode
jacobian = qnode.jacobian(ctx.args, ctx.kwargs)
jacobian = torch.as_tensor(jacobian, dtype=grad_output.dtype).to(grad_output)
vjp = torch.transpose(grad_output.view(-1, 1), 0, 1) @ jacobian
vjp = vjp.flatten()
# restore the nested structure of the input args
grad_input_list = unflatten_torch(vjp, ctx.saved_tensors)[0]
grad_input = []
# match the type and device of the input tensors
for i, j in zip(grad_input_list, ctx.saved_tensors):
res = torch.as_tensor(i, dtype=j.dtype)
if j.is_cuda: # pragma: no cover
cuda_device = j.get_device()
res = torch.as_tensor(res, device=cuda_device)
grad_input.append(res)
return (None,) + tuple(grad_input)
|
def backward(ctx, grad_output): # pragma: no cover
"""Implements the backwards pass QNode vector-Jacobian product"""
# NOTE: This method is definitely tested by the `test_torch.py` test suite,
# however does not show up in the coverage. This is likely due to
# subtleties in the torch.autograd.FunctionMeta metaclass, specifically
# the way in which the backward class is created on the fly
# evaluate the Jacobian matrix of the QNode
jacobian = qnode.jacobian(ctx.args, ctx.kwargs)
jacobian = torch.as_tensor(jacobian, dtype=grad_output.dtype)
vjp = torch.transpose(grad_output.view(-1, 1), 0, 1) @ jacobian
vjp = vjp.flatten()
# restore the nested structure of the input args
grad_input_list = unflatten_torch(vjp, ctx.saved_tensors)[0]
grad_input = []
# match the type and device of the input tensors
for i, j in zip(grad_input_list, ctx.saved_tensors):
res = torch.as_tensor(i, dtype=j.dtype)
if j.is_cuda: # pragma: no cover
cuda_device = j.get_device()
res = torch.as_tensor(res, device=cuda_device)
grad_input.append(res)
return (None,) + tuple(grad_input)
|
https://github.com/PennyLaneAI/pennylane/issues/709
|
Traceback (most recent call last):
File "qdqn.py", line 328, in <module>
loss.backward() #computes gradient of loss with respect to all weights n biases in the policy net
File "/home/ubuntu/anaconda3/envs/gymm/lib/python3.8/site-packages/torch/tensor.py", line 198, in backward
torch.autograd.backward(self, gradient, retain_graph, create_graph)
File "/home/ubuntu/anaconda3/envs/gymm/lib/python3.8/site-packages/torch/autograd/__init__.py", line 98, in backward
Variable._execution_engine.run_backward(
RuntimeError: Expected object of device type cuda but got device type cpu for argument #2 'mat2' in call to _th_mm
|
RuntimeError
|
def _evaluate_qnode(self, x):
"""Evaluates the QNode for a single input datapoint.
Args:
x (tensor): the datapoint
Returns:
tensor: output datapoint
"""
if qml.tape_mode_active():
return self._evaluate_qnode_tape_mode(x)
qnode = self.qnode
for arg in self.sig:
if arg is not self.input_arg: # Non-input arguments must always be positional
w = self.qnode_weights[arg].to(x)
qnode = functools.partial(qnode, w)
else:
if self.input_is_default: # The input argument can be positional or keyword
qnode = functools.partial(qnode, **{self.input_arg: x})
else:
qnode = functools.partial(qnode, x)
return qnode().type(x.dtype)
|
def _evaluate_qnode(self, x):
"""Evaluates the QNode for a single input datapoint.
Args:
x (tensor): the datapoint
Returns:
tensor: output datapoint
"""
if qml.tape_mode_active():
return self._evaluate_qnode_tape_mode(x)
qnode = self.qnode
for arg in self.sig:
if arg is not self.input_arg: # Non-input arguments must always be positional
w = self.qnode_weights[arg]
qnode = functools.partial(qnode, w)
else:
if self.input_is_default: # The input argument can be positional or keyword
qnode = functools.partial(qnode, **{self.input_arg: x})
else:
qnode = functools.partial(qnode, x)
return qnode().type(x.dtype)
|
https://github.com/PennyLaneAI/pennylane/issues/709
|
Traceback (most recent call last):
File "qdqn.py", line 328, in <module>
loss.backward() #computes gradient of loss with respect to all weights n biases in the policy net
File "/home/ubuntu/anaconda3/envs/gymm/lib/python3.8/site-packages/torch/tensor.py", line 198, in backward
torch.autograd.backward(self, gradient, retain_graph, create_graph)
File "/home/ubuntu/anaconda3/envs/gymm/lib/python3.8/site-packages/torch/autograd/__init__.py", line 98, in backward
Variable._execution_engine.run_backward(
RuntimeError: Expected object of device type cuda but got device type cpu for argument #2 'mat2' in call to _th_mm
|
RuntimeError
|
def _evaluate_qnode_tape_mode(self, x):
"""Evaluates a tape-mode QNode for a single input datapoint.
Args:
x (tensor): the datapoint
Returns:
tensor: output datapoint
"""
kwargs = {
**{self.input_arg: x},
**{arg: weight.to(x) for arg, weight in self.qnode_weights.items()},
}
return self.qnode(**kwargs).type(x.dtype)
|
def _evaluate_qnode_tape_mode(self, x):
"""Evaluates a tape-mode QNode for a single input datapoint.
Args:
x (tensor): the datapoint
Returns:
tensor: output datapoint
"""
kwargs = {**{self.input_arg: x}, **self.qnode_weights}
return self.qnode(**kwargs).type(x.dtype)
|
https://github.com/PennyLaneAI/pennylane/issues/709
|
Traceback (most recent call last):
File "qdqn.py", line 328, in <module>
loss.backward() #computes gradient of loss with respect to all weights n biases in the policy net
File "/home/ubuntu/anaconda3/envs/gymm/lib/python3.8/site-packages/torch/tensor.py", line 198, in backward
torch.autograd.backward(self, gradient, retain_graph, create_graph)
File "/home/ubuntu/anaconda3/envs/gymm/lib/python3.8/site-packages/torch/autograd/__init__.py", line 98, in backward
Variable._execution_engine.run_backward(
RuntimeError: Expected object of device type cuda but got device type cpu for argument #2 'mat2' in call to _th_mm
|
RuntimeError
|
def backward(ctx, grad_output): # pragma: no cover
"""Implements the backwards pass QNode vector-Jacobian product"""
tape = ctx.kwargs["tape"]
device = ctx.kwargs["device"]
tape.set_parameters(ctx.all_params_unwrapped, trainable_only=False)
jacobian = tape.jacobian(device, params=ctx.args, **tape.jacobian_options)
tape.set_parameters(ctx.all_params, trainable_only=False)
jacobian = torch.as_tensor(jacobian, dtype=grad_output.dtype).to(grad_output)
vjp = grad_output.view(1, -1) @ jacobian
grad_input_list = torch.unbind(vjp.flatten())
grad_input = []
# match the type and device of the input tensors
for i, j in zip(grad_input_list, ctx.saved_tensors):
res = torch.as_tensor(i, dtype=tape.dtype)
if j.is_cuda: # pragma: no cover
cuda_device = j.get_device()
res = torch.as_tensor(res, device=cuda_device)
grad_input.append(res)
return (None,) + tuple(grad_input)
|
def backward(ctx, grad_output): # pragma: no cover
"""Implements the backwards pass QNode vector-Jacobian product"""
tape = ctx.kwargs["tape"]
device = ctx.kwargs["device"]
tape.set_parameters(ctx.all_params_unwrapped, trainable_only=False)
jacobian = tape.jacobian(device, params=ctx.args, **tape.jacobian_options)
tape.set_parameters(ctx.all_params, trainable_only=False)
jacobian = torch.as_tensor(jacobian, dtype=grad_output.dtype)
vjp = grad_output.view(1, -1) @ jacobian
grad_input_list = torch.unbind(vjp.flatten())
grad_input = []
# match the type and device of the input tensors
for i, j in zip(grad_input_list, ctx.saved_tensors):
res = torch.as_tensor(i, dtype=tape.dtype)
if j.is_cuda: # pragma: no cover
cuda_device = j.get_device()
res = torch.as_tensor(res, device=cuda_device)
grad_input.append(res)
return (None,) + tuple(grad_input)
|
https://github.com/PennyLaneAI/pennylane/issues/709
|
Traceback (most recent call last):
File "qdqn.py", line 328, in <module>
loss.backward() #computes gradient of loss with respect to all weights n biases in the policy net
File "/home/ubuntu/anaconda3/envs/gymm/lib/python3.8/site-packages/torch/tensor.py", line 198, in backward
torch.autograd.backward(self, gradient, retain_graph, create_graph)
File "/home/ubuntu/anaconda3/envs/gymm/lib/python3.8/site-packages/torch/autograd/__init__.py", line 98, in backward
Variable._execution_engine.run_backward(
RuntimeError: Expected object of device type cuda but got device type cpu for argument #2 'mat2' in call to _th_mm
|
RuntimeError
|
def __init__(self, *params, wires=None, do_queue=True):
super().__init__(*params, wires=wires, do_queue=True)
pauli_word = params[1]
if not PauliRot._check_pauli_word(pauli_word):
raise ValueError(
'The given Pauli word "{}" contains characters that are not allowed.'
" Allowed characters are I, X, Y and Z".format(pauli_word)
)
num_wires = 1 if isinstance(wires, int) else len(wires)
if not len(pauli_word) == num_wires:
raise ValueError(
"The given Pauli word has length {}, length {} was expected for wires {}".format(
len(pauli_word), num_wires, wires
)
)
|
def __init__(self, *params, wires=None, do_queue=True):
super().__init__(*params, wires=wires, do_queue=True)
pauli_word = params[1]
if not PauliRot._check_pauli_word(pauli_word):
raise ValueError(
'The given Pauli word "{}" contains characters that are not allowed.'
" Allowed characters are I, X, Y and Z".format(pauli_word)
)
if not len(pauli_word) == len(wires):
raise ValueError(
"The given Pauli word has length {}, length {} was expected for wires {}".format(
len(pauli_word), len(wires), wires
)
)
|
https://github.com/PennyLaneAI/pennylane/issues/856
|
----------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-27-611f77c1af9c> in <module>
12 return qml.expval(qml.PauliX(1))
13
---> 14 print(apply_pauli_rots(0.345))
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/interfaces/autograd.py in __call__(self, *args, **kwargs)
67 self.set_trainable(args)
68 args = autograd.builtins.tuple(args) # pylint: disable=no-member
---> 69 return self.evaluate(args, kwargs)
70
71 @staticmethod
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/autograd/tracer.py in f_wrapped(*args, **kwargs)
46 return new_box(ans, trace, node)
47 else:
---> 48 return f_raw(*args, **kwargs)
49 f_wrapped.fun = f_raw
50 f_wrapped._is_autograd_primitive = True
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in evaluate(self, args, kwargs)
817
818 if self.circuit is None or self.mutable:
--> 819 self._construct(args, kwargs)
820
821 self.device.reset()
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/jacobian.py in _construct(self, args, kwargs)
87 for each positional parameter.
88 """
---> 89 super()._construct(args, kwargs)
90 self.par_to_grad_method = {k: self._best_method(k) for k in self.variable_deps}
91
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in _construct(self, args, kwargs)
574
575 # check the validity of the circuit
--> 576 self._check_circuit(res)
577 del self.queue
578 del self.obs_queue
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in _check_circuit(self, res)
731 if self.device.operations:
732 # replace operations in the queue with any decompositions if required
--> 733 queue = decompose_queue(self.queue, self.device)
734
735 self.ops = queue + list(res)
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in decompose_queue(ops, device)
123 for op in ops:
124 try:
--> 125 new_ops.extend(_decompose_queue([op], device))
126 except NotImplementedError:
127 raise qml.DeviceError(
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in _decompose_queue(ops, device)
98 new_ops.append(op)
99 else:
--> 100 decomposed_ops = op.decomposition(*op.data, wires=op.wires)
101 if op.inverse:
102 decomposed_ops = qml.inv(decomposed_ops)
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/templates/decorator.py in wrapper(*args, **kwargs)
59 def wrapper(*args, **kwargs):
60 with OperationRecorder() as rec:
---> 61 func(*args, **kwargs)
62
63 return rec.queue
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/ops/qubit.py in decomposition(theta, pauli_word, wires)
925 @template
926 def decomposition(theta, pauli_word, wires):
--> 927 active_wires, active_gates = zip(
928 *[(wire, gate) for wire, gate in zip(wires, pauli_word) if gate != "I"]
929 )
ValueError: not enough values to unpack (expected 2, got 0)
|
ValueError
|
def _matrix(cls, *params):
theta = params[0]
pauli_word = params[1]
if not PauliRot._check_pauli_word(pauli_word):
raise ValueError(
'The given Pauli word "{}" contains characters that are not allowed.'
" Allowed characters are I, X, Y and Z".format(pauli_word)
)
# Simplest case is if the Pauli is the identity matrix
if pauli_word == "I" * len(pauli_word):
return np.exp(-1j * theta / 2) * np.eye(2 ** len(pauli_word))
# We first generate the matrix excluding the identity parts and expand it afterwards.
# To this end, we have to store on which wires the non-identity parts act
non_identity_wires, non_identity_gates = zip(
*[(wire, gate) for wire, gate in enumerate(pauli_word) if gate != "I"]
)
multi_Z_rot_matrix = MultiRZ._matrix(theta, len(non_identity_gates))
# now we conjugate with Hadamard and RX to create the Pauli string
conjugation_matrix = functools.reduce(
np.kron,
[PauliRot._PAULI_CONJUGATION_MATRICES[gate] for gate in non_identity_gates],
)
return expand(
conjugation_matrix.T.conj() @ multi_Z_rot_matrix @ conjugation_matrix,
non_identity_wires,
list(range(len(pauli_word))),
)
|
def _matrix(cls, *params):
theta = params[0]
pauli_word = params[1]
if not PauliRot._check_pauli_word(pauli_word):
raise ValueError(
'The given Pauli word "{}" contains characters that are not allowed.'
" Allowed characters are I, X, Y and Z".format(pauli_word)
)
# We first generate the matrix excluding the identity parts and expand it afterwards.
# To this end, we have to store on which wires the non-identity parts act
non_identity_wires, non_identity_gates = zip(
*[(wire, gate) for wire, gate in enumerate(pauli_word) if gate != "I"]
)
multi_Z_rot_matrix = MultiRZ._matrix(theta, len(non_identity_gates))
# now we conjugate with Hadamard and RX to create the Pauli string
conjugation_matrix = functools.reduce(
np.kron,
[PauliRot._PAULI_CONJUGATION_MATRICES[gate] for gate in non_identity_gates],
)
return expand(
conjugation_matrix.T.conj() @ multi_Z_rot_matrix @ conjugation_matrix,
non_identity_wires,
list(range(len(pauli_word))),
)
|
https://github.com/PennyLaneAI/pennylane/issues/856
|
----------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-27-611f77c1af9c> in <module>
12 return qml.expval(qml.PauliX(1))
13
---> 14 print(apply_pauli_rots(0.345))
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/interfaces/autograd.py in __call__(self, *args, **kwargs)
67 self.set_trainable(args)
68 args = autograd.builtins.tuple(args) # pylint: disable=no-member
---> 69 return self.evaluate(args, kwargs)
70
71 @staticmethod
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/autograd/tracer.py in f_wrapped(*args, **kwargs)
46 return new_box(ans, trace, node)
47 else:
---> 48 return f_raw(*args, **kwargs)
49 f_wrapped.fun = f_raw
50 f_wrapped._is_autograd_primitive = True
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in evaluate(self, args, kwargs)
817
818 if self.circuit is None or self.mutable:
--> 819 self._construct(args, kwargs)
820
821 self.device.reset()
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/jacobian.py in _construct(self, args, kwargs)
87 for each positional parameter.
88 """
---> 89 super()._construct(args, kwargs)
90 self.par_to_grad_method = {k: self._best_method(k) for k in self.variable_deps}
91
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in _construct(self, args, kwargs)
574
575 # check the validity of the circuit
--> 576 self._check_circuit(res)
577 del self.queue
578 del self.obs_queue
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in _check_circuit(self, res)
731 if self.device.operations:
732 # replace operations in the queue with any decompositions if required
--> 733 queue = decompose_queue(self.queue, self.device)
734
735 self.ops = queue + list(res)
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in decompose_queue(ops, device)
123 for op in ops:
124 try:
--> 125 new_ops.extend(_decompose_queue([op], device))
126 except NotImplementedError:
127 raise qml.DeviceError(
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in _decompose_queue(ops, device)
98 new_ops.append(op)
99 else:
--> 100 decomposed_ops = op.decomposition(*op.data, wires=op.wires)
101 if op.inverse:
102 decomposed_ops = qml.inv(decomposed_ops)
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/templates/decorator.py in wrapper(*args, **kwargs)
59 def wrapper(*args, **kwargs):
60 with OperationRecorder() as rec:
---> 61 func(*args, **kwargs)
62
63 return rec.queue
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/ops/qubit.py in decomposition(theta, pauli_word, wires)
925 @template
926 def decomposition(theta, pauli_word, wires):
--> 927 active_wires, active_gates = zip(
928 *[(wire, gate) for wire, gate in zip(wires, pauli_word) if gate != "I"]
929 )
ValueError: not enough values to unpack (expected 2, got 0)
|
ValueError
|
def _eigvals(cls, theta, pauli_word):
# Identity must be treated specially because its eigenvalues are all the same
if pauli_word == "I" * len(pauli_word):
return np.exp(-1j * theta / 2) * np.ones(2 ** len(pauli_word))
return MultiRZ._eigvals(theta, len(pauli_word))
|
def _eigvals(cls, theta, pauli_word):
return MultiRZ._eigvals(theta, len(pauli_word))
|
https://github.com/PennyLaneAI/pennylane/issues/856
|
----------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-27-611f77c1af9c> in <module>
12 return qml.expval(qml.PauliX(1))
13
---> 14 print(apply_pauli_rots(0.345))
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/interfaces/autograd.py in __call__(self, *args, **kwargs)
67 self.set_trainable(args)
68 args = autograd.builtins.tuple(args) # pylint: disable=no-member
---> 69 return self.evaluate(args, kwargs)
70
71 @staticmethod
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/autograd/tracer.py in f_wrapped(*args, **kwargs)
46 return new_box(ans, trace, node)
47 else:
---> 48 return f_raw(*args, **kwargs)
49 f_wrapped.fun = f_raw
50 f_wrapped._is_autograd_primitive = True
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in evaluate(self, args, kwargs)
817
818 if self.circuit is None or self.mutable:
--> 819 self._construct(args, kwargs)
820
821 self.device.reset()
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/jacobian.py in _construct(self, args, kwargs)
87 for each positional parameter.
88 """
---> 89 super()._construct(args, kwargs)
90 self.par_to_grad_method = {k: self._best_method(k) for k in self.variable_deps}
91
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in _construct(self, args, kwargs)
574
575 # check the validity of the circuit
--> 576 self._check_circuit(res)
577 del self.queue
578 del self.obs_queue
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in _check_circuit(self, res)
731 if self.device.operations:
732 # replace operations in the queue with any decompositions if required
--> 733 queue = decompose_queue(self.queue, self.device)
734
735 self.ops = queue + list(res)
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in decompose_queue(ops, device)
123 for op in ops:
124 try:
--> 125 new_ops.extend(_decompose_queue([op], device))
126 except NotImplementedError:
127 raise qml.DeviceError(
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in _decompose_queue(ops, device)
98 new_ops.append(op)
99 else:
--> 100 decomposed_ops = op.decomposition(*op.data, wires=op.wires)
101 if op.inverse:
102 decomposed_ops = qml.inv(decomposed_ops)
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/templates/decorator.py in wrapper(*args, **kwargs)
59 def wrapper(*args, **kwargs):
60 with OperationRecorder() as rec:
---> 61 func(*args, **kwargs)
62
63 return rec.queue
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/ops/qubit.py in decomposition(theta, pauli_word, wires)
925 @template
926 def decomposition(theta, pauli_word, wires):
--> 927 active_wires, active_gates = zip(
928 *[(wire, gate) for wire, gate in zip(wires, pauli_word) if gate != "I"]
929 )
ValueError: not enough values to unpack (expected 2, got 0)
|
ValueError
|
def decomposition(theta, pauli_word, wires):
# Catch cases when the wire is passed as a single int.
if isinstance(wires, int):
wires = [wires]
# Check for identity and do nothing
if pauli_word == "I" * len(wires):
return
active_wires, active_gates = zip(
*[(wire, gate) for wire, gate in zip(wires, pauli_word) if gate != "I"]
)
for wire, gate in zip(active_wires, active_gates):
if gate == "X":
Hadamard(wires=[wire])
elif gate == "Y":
RX(np.pi / 2, wires=[wire])
MultiRZ(theta, wires=list(active_wires))
for wire, gate in zip(active_wires, active_gates):
if gate == "X":
Hadamard(wires=[wire])
elif gate == "Y":
RX(-np.pi / 2, wires=[wire])
|
def decomposition(theta, pauli_word, wires):
active_wires, active_gates = zip(
*[(wire, gate) for wire, gate in zip(wires, pauli_word) if gate != "I"]
)
for wire, gate in zip(active_wires, active_gates):
if gate == "X":
Hadamard(wires=[wire])
elif gate == "Y":
RX(np.pi / 2, wires=[wire])
MultiRZ(theta, wires=list(active_wires))
for wire, gate in zip(active_wires, active_gates):
if gate == "X":
Hadamard(wires=[wire])
elif gate == "Y":
RX(-np.pi / 2, wires=[wire])
|
https://github.com/PennyLaneAI/pennylane/issues/856
|
----------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-27-611f77c1af9c> in <module>
12 return qml.expval(qml.PauliX(1))
13
---> 14 print(apply_pauli_rots(0.345))
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/interfaces/autograd.py in __call__(self, *args, **kwargs)
67 self.set_trainable(args)
68 args = autograd.builtins.tuple(args) # pylint: disable=no-member
---> 69 return self.evaluate(args, kwargs)
70
71 @staticmethod
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/autograd/tracer.py in f_wrapped(*args, **kwargs)
46 return new_box(ans, trace, node)
47 else:
---> 48 return f_raw(*args, **kwargs)
49 f_wrapped.fun = f_raw
50 f_wrapped._is_autograd_primitive = True
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in evaluate(self, args, kwargs)
817
818 if self.circuit is None or self.mutable:
--> 819 self._construct(args, kwargs)
820
821 self.device.reset()
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/jacobian.py in _construct(self, args, kwargs)
87 for each positional parameter.
88 """
---> 89 super()._construct(args, kwargs)
90 self.par_to_grad_method = {k: self._best_method(k) for k in self.variable_deps}
91
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in _construct(self, args, kwargs)
574
575 # check the validity of the circuit
--> 576 self._check_circuit(res)
577 del self.queue
578 del self.obs_queue
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in _check_circuit(self, res)
731 if self.device.operations:
732 # replace operations in the queue with any decompositions if required
--> 733 queue = decompose_queue(self.queue, self.device)
734
735 self.ops = queue + list(res)
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in decompose_queue(ops, device)
123 for op in ops:
124 try:
--> 125 new_ops.extend(_decompose_queue([op], device))
126 except NotImplementedError:
127 raise qml.DeviceError(
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/qnodes/base.py in _decompose_queue(ops, device)
98 new_ops.append(op)
99 else:
--> 100 decomposed_ops = op.decomposition(*op.data, wires=op.wires)
101 if op.inverse:
102 decomposed_ops = qml.inv(decomposed_ops)
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/templates/decorator.py in wrapper(*args, **kwargs)
59 def wrapper(*args, **kwargs):
60 with OperationRecorder() as rec:
---> 61 func(*args, **kwargs)
62
63 return rec.queue
~/Software/anaconda3/envs/test-xanadu/lib/python3.8/site-packages/pennylane/ops/qubit.py in decomposition(theta, pauli_word, wires)
925 @template
926 def decomposition(theta, pauli_word, wires):
--> 927 active_wires, active_gates = zip(
928 *[(wire, gate) for wire, gate in zip(wires, pauli_word) if gate != "I"]
929 )
ValueError: not enough values to unpack (expected 2, got 0)
|
ValueError
|
def run_wsgi(self):
if self.headers.get("Expect", "").lower().strip() == "100-continue":
self.wfile.write(b"HTTP/1.1 100 Continue\r\n\r\n")
self.environ = environ = self.make_environ()
headers_set = []
headers_sent = []
def write(data):
assert headers_set, "write() before start_response"
if not headers_sent:
status, response_headers = headers_sent[:] = headers_set
try:
code, msg = status.split(None, 1)
except ValueError:
code, msg = status, ""
code = int(code)
self.send_response(code, msg)
header_keys = set()
for key, value in response_headers:
self.send_header(key, value)
key = key.lower()
header_keys.add(key)
if not (
"content-length" in header_keys
or environ["REQUEST_METHOD"] == "HEAD"
or code < 200
or code in (204, 304)
):
self.close_connection = True
self.send_header("Connection", "close")
if "server" not in header_keys:
self.send_header("Server", self.version_string())
if "date" not in header_keys:
self.send_header("Date", self.date_time_string())
self.end_headers()
assert isinstance(data, bytes), "applications must write bytes"
if data:
# Only write data if there is any to avoid Python 3.5 SSL bug
self.wfile.write(data)
self.wfile.flush()
def start_response(status, response_headers, exc_info=None):
if exc_info:
try:
if headers_sent:
reraise(*exc_info)
finally:
exc_info = None
elif headers_set:
raise AssertionError("Headers already set")
headers_set[:] = [status, response_headers]
return write
def execute(app):
application_iter = app(environ, start_response)
try:
for data in application_iter:
write(data)
if not headers_sent:
write(b"")
finally:
if hasattr(application_iter, "close"):
application_iter.close()
application_iter = None
try:
execute(self.server.app)
except (_ConnectionError, socket.timeout) as e:
self.connection_dropped(e, environ)
except Exception:
if self.server.passthrough_errors:
raise
from .debug.tbtools import get_current_traceback
traceback = get_current_traceback(ignore_system_exceptions=True)
try:
# if we haven't yet sent the headers but they are set
# we roll back to be able to set them again.
if not headers_sent:
del headers_set[:]
execute(InternalServerError())
except Exception:
pass
self.server.log("error", "Error on request:\n%s", traceback.plaintext)
|
def run_wsgi(self):
if self.headers.get("Expect", "").lower().strip() == "100-continue":
self.wfile.write(b"HTTP/1.1 100 Continue\r\n\r\n")
self.environ = environ = self.make_environ()
headers_set = []
headers_sent = []
def write(data):
assert headers_set, "write() before start_response"
if not headers_sent:
status, response_headers = headers_sent[:] = headers_set
try:
code, msg = status.split(None, 1)
except ValueError:
code, msg = status, ""
code = int(code)
self.send_response(code, msg)
header_keys = set()
for key, value in response_headers:
self.send_header(key, value)
key = key.lower()
header_keys.add(key)
if not (
"content-length" in header_keys
or environ["REQUEST_METHOD"] == "HEAD"
or code < 200
or code in (204, 304)
):
self.close_connection = True
self.send_header("Connection", "close")
if "server" not in header_keys:
self.send_header("Server", self.version_string())
if "date" not in header_keys:
self.send_header("Date", self.date_time_string())
self.end_headers()
assert isinstance(data, bytes), "applications must write bytes"
self.wfile.write(data)
self.wfile.flush()
def start_response(status, response_headers, exc_info=None):
if exc_info:
try:
if headers_sent:
reraise(*exc_info)
finally:
exc_info = None
elif headers_set:
raise AssertionError("Headers already set")
headers_set[:] = [status, response_headers]
return write
def execute(app):
application_iter = app(environ, start_response)
try:
for data in application_iter:
write(data)
if not headers_sent:
write(b"")
finally:
if hasattr(application_iter, "close"):
application_iter.close()
application_iter = None
try:
execute(self.server.app)
except (_ConnectionError, socket.timeout) as e:
self.connection_dropped(e, environ)
except Exception:
if self.server.passthrough_errors:
raise
from .debug.tbtools import get_current_traceback
traceback = get_current_traceback(ignore_system_exceptions=True)
try:
# if we haven't yet sent the headers but they are set
# we roll back to be able to set them again.
if not headers_sent:
del headers_set[:]
execute(InternalServerError())
except Exception:
pass
self.server.log("error", "Error on request:\n%s", traceback.plaintext)
|
https://github.com/pallets/werkzeug/issues/1659
|
Error on request:
Traceback (most recent call last):
File "/home/relent95/.local/lib/python3.5/site-packages/werkzeug/serving.py", line 303, in run_wsgi
execute(self.server.app)
File "/home/relent95/.local/lib/python3.5/site-packages/werkzeug/serving.py", line 294, in execute
write(data)
File "/home/relent95/.local/lib/python3.5/site-packages/werkzeug/serving.py", line 275, in write
self.wfile.write(data)
File "/usr/lib/python3.5/socket.py", line 593, in write
return self._sock.send(b)
File "/usr/lib/python3.5/ssl.py", line 861, in send
return self._sslobj.write(data)
File "/usr/lib/python3.5/ssl.py", line 586, in write
return self._sslobj.write(data)
ssl.SSLEOFError: EOF occurred in violation of protocol (_ssl.c:1848)
|
ssl.SSLEOFError
|
def write(data):
assert headers_set, "write() before start_response"
if not headers_sent:
status, response_headers = headers_sent[:] = headers_set
try:
code, msg = status.split(None, 1)
except ValueError:
code, msg = status, ""
code = int(code)
self.send_response(code, msg)
header_keys = set()
for key, value in response_headers:
self.send_header(key, value)
key = key.lower()
header_keys.add(key)
if not (
"content-length" in header_keys
or environ["REQUEST_METHOD"] == "HEAD"
or code < 200
or code in (204, 304)
):
self.close_connection = True
self.send_header("Connection", "close")
if "server" not in header_keys:
self.send_header("Server", self.version_string())
if "date" not in header_keys:
self.send_header("Date", self.date_time_string())
self.end_headers()
assert isinstance(data, bytes), "applications must write bytes"
if data:
# Only write data if there is any to avoid Python 3.5 SSL bug
self.wfile.write(data)
self.wfile.flush()
|
def write(data):
assert headers_set, "write() before start_response"
if not headers_sent:
status, response_headers = headers_sent[:] = headers_set
try:
code, msg = status.split(None, 1)
except ValueError:
code, msg = status, ""
code = int(code)
self.send_response(code, msg)
header_keys = set()
for key, value in response_headers:
self.send_header(key, value)
key = key.lower()
header_keys.add(key)
if not (
"content-length" in header_keys
or environ["REQUEST_METHOD"] == "HEAD"
or code < 200
or code in (204, 304)
):
self.close_connection = True
self.send_header("Connection", "close")
if "server" not in header_keys:
self.send_header("Server", self.version_string())
if "date" not in header_keys:
self.send_header("Date", self.date_time_string())
self.end_headers()
assert isinstance(data, bytes), "applications must write bytes"
self.wfile.write(data)
self.wfile.flush()
|
https://github.com/pallets/werkzeug/issues/1659
|
Error on request:
Traceback (most recent call last):
File "/home/relent95/.local/lib/python3.5/site-packages/werkzeug/serving.py", line 303, in run_wsgi
execute(self.server.app)
File "/home/relent95/.local/lib/python3.5/site-packages/werkzeug/serving.py", line 294, in execute
write(data)
File "/home/relent95/.local/lib/python3.5/site-packages/werkzeug/serving.py", line 275, in write
self.wfile.write(data)
File "/usr/lib/python3.5/socket.py", line 593, in write
return self._sock.send(b)
File "/usr/lib/python3.5/ssl.py", line 861, in send
return self._sslobj.write(data)
File "/usr/lib/python3.5/ssl.py", line 586, in write
return self._sslobj.write(data)
ssl.SSLEOFError: EOF occurred in violation of protocol (_ssl.c:1848)
|
ssl.SSLEOFError
|
def _compile_builder(self, append_unknown=True):
defaults = self.defaults or {}
dom_ops = []
url_ops = []
opl = dom_ops
for is_dynamic, data in self._trace:
if data == "|" and opl is dom_ops:
opl = url_ops
continue
# this seems like a silly case to ever come up but:
# if a default is given for a value that appears in the rule,
# resolve it to a constant ahead of time
if is_dynamic and data in defaults:
data = self._converters[data].to_url(defaults[data])
opl.append((False, data))
elif not is_dynamic:
opl.append(
(False, url_quote(to_bytes(data, self.map.charset), safe="/:|+"))
)
else:
opl.append((True, data))
def _convert(elem):
ret = _prefix_names(_CALL_CONVERTER_CODE_FMT.format(elem=elem))
ret.args = [ast.Name(str(elem), ast.Load())] # str for py2
return ret
def _parts(ops):
parts = [
_convert(elem) if is_dynamic else ast.Str(s=elem)
for is_dynamic, elem in ops
]
parts = parts or [ast.Str("")]
# constant fold
ret = [parts[0]]
for p in parts[1:]:
if isinstance(p, ast.Str) and isinstance(ret[-1], ast.Str):
ret[-1] = ast.Str(ret[-1].s + p.s)
else:
ret.append(p)
return ret
dom_parts = _parts(dom_ops)
url_parts = _parts(url_ops)
if not append_unknown:
body = []
else:
body = [_IF_KWARGS_URL_ENCODE_AST]
url_parts.extend(_URL_ENCODE_AST_NAMES)
def _join(parts):
if len(parts) == 1: # shortcut
return parts[0]
elif hasattr(ast, "JoinedStr"): # py36+
return ast.JoinedStr(parts)
else:
call = _prefix_names('"".join()')
call.args = [ast.Tuple(parts, ast.Load())]
return call
body.append(ast.Return(ast.Tuple([_join(dom_parts), _join(url_parts)], ast.Load())))
# str is necessary for python2
pargs = [
str(elem)
for is_dynamic, elem in dom_ops + url_ops
if is_dynamic and elem not in defaults
]
kargs = [str(k) for k in defaults]
func_ast = _prefix_names("def _(): pass")
func_ast.name = "<builder:{!r}>".format(self.rule)
if hasattr(ast, "arg"): # py3
func_ast.args.args.append(ast.arg(".self", None))
for arg in pargs + kargs:
func_ast.args.args.append(ast.arg(arg, None))
func_ast.args.kwarg = ast.arg(".kwargs", None)
else:
func_ast.args.args.append(ast.Name(".self", ast.Load()))
for arg in pargs + kargs:
func_ast.args.args.append(ast.Name(arg, ast.Load()))
func_ast.args.kwarg = ".kwargs"
for _ in kargs:
func_ast.args.defaults.append(ast.Str(""))
func_ast.body = body
# use `ast.parse` instead of `ast.Module` for better portability
# python3.8 changes the signature of `ast.Module`
module = ast.parse("")
module.body = [func_ast]
module = ast.fix_missing_locations(module)
code = compile(module, "<werkzeug routing>", "exec")
return self._get_func_code(code, func_ast.name)
|
def _compile_builder(self, append_unknown=True):
defaults = self.defaults or {}
dom_ops = []
url_ops = []
opl = dom_ops
for is_dynamic, data in self._trace:
if data == "|" and opl is dom_ops:
opl = url_ops
continue
# this seems like a silly case to ever come up but:
# if a default is given for a value that appears in the rule,
# resolve it to a constant ahead of time
if is_dynamic and data in defaults:
data = self._converters[data].to_url(defaults[data])
opl.append((False, data))
elif not is_dynamic:
opl.append(
(False, url_quote(to_bytes(data, self.map.charset), safe="/:|+"))
)
else:
opl.append((True, data))
def _convert(elem):
ret = _prefix_names(_CALL_CONVERTER_CODE_FMT.format(elem=elem))
ret.args = [ast.Name(str(elem), ast.Load())] # str for py2
return ret
def _parts(ops):
parts = [
_convert(elem) if is_dynamic else ast.Str(s=elem)
for is_dynamic, elem in ops
]
parts = parts or [ast.Str("")]
# constant fold
ret = [parts[0]]
for p in parts[1:]:
if isinstance(p, ast.Str) and isinstance(ret[-1], ast.Str):
ret[-1] = ast.Str(ret[-1].s + p.s)
else:
ret.append(p)
return ret
dom_parts = _parts(dom_ops)
url_parts = _parts(url_ops)
if not append_unknown:
body = []
else:
body = [_IF_KWARGS_URL_ENCODE_AST]
url_parts.extend(_URL_ENCODE_AST_NAMES)
def _join(parts):
if len(parts) == 1: # shortcut
return parts[0]
elif hasattr(ast, "JoinedStr"): # py36+
return ast.JoinedStr(parts)
else:
call = _prefix_names('"".join()')
call.args = [ast.Tuple(parts, ast.Load())]
return call
body.append(ast.Return(ast.Tuple([_join(dom_parts), _join(url_parts)], ast.Load())))
# str is necessary for python2
pargs = [
str(elem)
for is_dynamic, elem in dom_ops + url_ops
if is_dynamic and elem not in defaults
]
kargs = [str(k) for k in defaults]
func_ast = _prefix_names("def _(): pass")
func_ast.name = "<builder:{!r}>".format(self.rule)
if hasattr(ast, "arg"): # py3
func_ast.args.args.append(ast.arg(".self", None))
for arg in pargs + kargs:
func_ast.args.args.append(ast.arg(arg, None))
func_ast.args.kwarg = ast.arg(".kwargs", None)
else:
func_ast.args.args.append(ast.Name(".self", ast.Load()))
for arg in pargs + kargs:
func_ast.args.args.append(ast.Name(arg, ast.Load()))
func_ast.args.kwarg = ".kwargs"
for _ in kargs:
func_ast.args.defaults.append(ast.Str(""))
func_ast.body = body
module = ast.fix_missing_locations(ast.Module([func_ast]))
code = compile(module, "<werkzeug routing>", "exec")
return self._get_func_code(code, func_ast.name)
|
https://github.com/pallets/werkzeug/issues/1551
|
flask run
* Serving Flask app "flask_test.py"
* Environment: production
WARNING: This is a development server. Do not use it in a production deployment.
Use a production WSGI server instead.
* Debug mode: off
Traceback (most recent call last):
File "d:\tool\python\38\lib\runpy.py", line 192, in _run_module_as_main
return _run_code(code, main_globals, None,
File "d:\tool\python\38\lib\runpy.py", line 85, in _run_code
exec(code, run_globals)
File "d:\tool\python\38\Scripts\flask.exe\__main__.py", line 9, in <module>
File "d:\tool\python\38\lib\site-packages\flask\cli.py", line 906, in main
cli.main(args=args, prog_name=name)
File "d:\tool\python\38\lib\site-packages\flask\cli.py", line 569, in main
return super(FlaskGroup, self).main(*args, **kwargs)
File "d:\tool\python\38\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "d:\tool\python\38\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "d:\tool\python\38\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "d:\tool\python\38\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "d:\tool\python\38\lib\site-packages\click\decorators.py", line 64, in new_func
return ctx.invoke(f, obj, *args, **kwargs)
File "d:\tool\python\38\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "d:\tool\python\38\lib\site-packages\flask\cli.py", line 779, in run_command
app = DispatchingApp(info.load_app, use_eager_loading=eager_loading)
File "d:\tool\python\38\lib\site-packages\flask\cli.py", line 300, in __init__
self._load_unlocked()
File "d:\tool\python\38\lib\site-packages\flask\cli.py", line 324, in _load_unlocked
self._app = rv = self.loader()
File "d:\tool\python\38\lib\site-packages\flask\cli.py", line 381, in load_app
app = locate_app(self, import_name, name)
File "d:\tool\python\38\lib\site-packages\flask\cli.py", line 236, in locate_app
__import__(module_name)
File "D:\flask_test.py", line 3, in <module>
app = Flask(__name__)
File "d:\tool\python\38\lib\site-packages\flask\app.py", line 559, in __init__
self.add_url_rule(
File "d:\tool\python\38\lib\site-packages\flask\app.py", line 67, in wrapper_func
return f(self, *args, **kwargs)
File "d:\tool\python\38\lib\site-packages\flask\app.py", line 1217, in add_url_rule
self.url_map.add(rule)
File "d:\tool\python\38\lib\site-packages\werkzeug\routing.py", line 1388, in add
rule.bind(self)
File "d:\tool\python\38\lib\site-packages\werkzeug\routing.py", line 730, in bind
self.compile()
File "d:\tool\python\38\lib\site-packages\werkzeug\routing.py", line 794, in compile
self._build = self._compile_builder(False).__get__(self, None)
File "d:\tool\python\38\lib\site-packages\werkzeug\routing.py", line 951, in _compile_builder
code = compile(module, "<werkzeug routing>", "exec")
TypeError: required field "type_ignores" missing from Module
|
TypeError
|
def _compile_builder(self, append_unknown=True):
defaults = self.defaults or {}
dom_ops = []
url_ops = []
opl = dom_ops
for is_dynamic, data in self._trace:
if data == "|" and opl is dom_ops:
opl = url_ops
continue
# this seems like a silly case to ever come up but:
# if a default is given for a value that appears in the rule,
# resolve it to a constant ahead of time
if is_dynamic and data in defaults:
data = self._converters[data].to_url(defaults[data])
opl.append((False, data))
elif not is_dynamic:
opl.append(
(False, url_quote(to_bytes(data, self.map.charset), safe="/:|+"))
)
else:
opl.append((True, data))
def _convert(elem):
ret = _prefix_names(_CALL_CONVERTER_CODE_FMT.format(elem=elem))
ret.args = [ast.Name(str(elem), ast.Load())] # str for py2
return ret
def _parts(ops):
parts = [
_convert(elem) if is_dynamic else ast.Str(s=elem)
for is_dynamic, elem in ops
]
parts = parts or [ast.Str("")]
# constant fold
ret = [parts[0]]
for p in parts[1:]:
if isinstance(p, ast.Str) and isinstance(ret[-1], ast.Str):
ret[-1] = ast.Str(ret[-1].s + p.s)
else:
ret.append(p)
return ret
dom_parts = _parts(dom_ops)
url_parts = _parts(url_ops)
if not append_unknown:
body = []
else:
body = [_IF_KWARGS_URL_ENCODE_AST]
url_parts.extend(_URL_ENCODE_AST_NAMES)
def _join(parts):
if len(parts) == 1: # shortcut
return parts[0]
elif hasattr(ast, "JoinedStr"): # py36+
return ast.JoinedStr(parts)
else:
call = _prefix_names('"".join()')
call.args = [ast.Tuple(parts, ast.Load())]
return call
body.append(ast.Return(ast.Tuple([_join(dom_parts), _join(url_parts)], ast.Load())))
# str is necessary for python2
pargs = [
str(elem)
for is_dynamic, elem in dom_ops + url_ops
if is_dynamic and elem not in defaults
]
kargs = [str(k) for k in defaults]
func_ast = _prefix_names("def _(): pass")
func_ast.name = "<builder:{!r}>".format(self.rule)
if hasattr(ast, "arg"): # py3
func_ast.args.args.append(ast.arg(".self", None))
for arg in pargs + kargs:
func_ast.args.args.append(ast.arg(arg, None))
func_ast.args.kwarg = ast.arg(".kwargs", None)
else:
func_ast.args.args.append(ast.Name(".self", ast.Load()))
for arg in pargs + kargs:
func_ast.args.args.append(ast.Name(arg, ast.Load()))
func_ast.args.kwarg = ".kwargs"
for _ in kargs:
func_ast.args.defaults.append(ast.Str(""))
func_ast.body = body
module = ast.fix_missing_locations(ast.Module([func_ast]))
code = compile(module, "<werkzeug routing>", "exec")
return self._get_func_code(code, func_ast.name)
|
def _compile_builder(self, append_unknown=True):
defaults = self.defaults or {}
dom_ops = []
url_ops = []
opl = dom_ops
for is_dynamic, data in self._trace:
if data == "|" and opl is dom_ops:
opl = url_ops
continue
# this seems like a silly case to ever come up but:
# if a default is given for a value that appears in the rule,
# resolve it to a constant ahead of time
if is_dynamic and data in defaults:
data = self._converters[data].to_url(defaults[data])
opl.append((False, data))
elif not is_dynamic:
opl.append(
(False, url_quote(to_bytes(data, self.map.charset), safe="/:|+"))
)
else:
opl.append((True, data))
def _convert(elem):
ret = _prefix_names(_CALL_CONVERTER_CODE_FMT.format(elem=elem))
ret.args = [ast.Name(str(elem), ast.Load())] # str for py2
return ret
def _parts(ops):
parts = [
_convert(elem) if is_dynamic else ast.Str(s=elem)
for is_dynamic, elem in ops
]
parts = parts or [ast.Str("")]
# constant fold
ret = [parts[0]]
for p in parts[1:]:
if isinstance(p, ast.Str) and isinstance(ret[-1], ast.Str):
ret[-1] = ast.Str(ret[-1].s + p.s)
else:
ret.append(p)
return ret
dom_parts = _parts(dom_ops)
url_parts = _parts(url_ops)
if not append_unknown:
body = []
else:
body = [_IF_KWARGS_URL_ENCODE_AST]
url_parts.extend(_URL_ENCODE_AST_NAMES)
def _join(parts):
if len(parts) == 1: # shortcut
return parts[0]
elif hasattr(ast, "JoinedStr"): # py36+
return ast.JoinedStr(parts)
else:
call = _prefix_names('"".join()')
call.args = [ast.Tuple(parts, ast.Load())]
return call
body.append(ast.Return(ast.Tuple([_join(dom_parts), _join(url_parts)], ast.Load())))
# str is necessary for python2
pargs = [
str(elem)
for is_dynamic, elem in dom_ops + url_ops
if is_dynamic and elem not in defaults
]
kargs = [str(k) for k in defaults]
func_ast = _prefix_names("def _(): pass")
func_ast.name = "<builder:{!r}>".format(self.rule)
if hasattr(ast, "arg"): # py3
func_ast.args.args.append(ast.arg(".self", None))
for arg in pargs + kargs:
func_ast.args.args.append(ast.arg(arg, None))
func_ast.args.kwarg = ast.arg(".kwargs", None)
else:
func_ast.args.args.append(ast.Name(".self", ast.Load()))
for arg in pargs + kargs:
func_ast.args.args.append(ast.Name(arg, ast.Load()))
func_ast.args.kwarg = ".kwargs"
for _ in kargs:
func_ast.args.defaults.append(ast.Str(""))
func_ast.body = body
module = ast.fix_missing_locations(ast.Module([func_ast]))
code = compile(module, "<werkzeug routing>", "exec")
globs, locs = {}, {}
exec(code, globs, locs)
return locs[func_ast.name]
|
https://github.com/pallets/werkzeug/issues/1544
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/lib64/python2.7/site-packages/werkzeug/routing.py", line 948
exec(code, globs, locs)
SyntaxError: unqualified exec is not allowed in function '_compile_builder' it contains a nested function with free variables
|
SyntaxError
|
def __init__(self, data=None, secret_key=None, new=True):
ModificationTrackingDict.__init__(self, data or ())
# explicitly convert it into a bytestring because python 2.6
# no longer performs an implicit string conversion on hmac
if secret_key is not None:
secret_key = to_bytes(secret_key, "utf-8")
self.secret_key = secret_key
self.new = new
if self.serialization_method is pickle:
warnings.warn(
"The default SecureCookie.serialization_method will change from pickle"
" to json in 1.0. To upgrade existing tokens, override unquote to try"
" pickle if json fails."
)
|
def __init__(self, data=None, secret_key=None, new=True):
ModificationTrackingDict.__init__(self, data or ())
# explicitly convert it into a bytestring because python 2.6
# no longer performs an implicit string conversion on hmac
if secret_key is not None:
secret_key = to_bytes(secret_key, "utf-8")
self.secret_key = secret_key
self.new = new
|
https://github.com/pallets/werkzeug/issues/953
|
Traceback (most recent call last):
File "test_secure_cookie.py", line 10, in <module>
print(c.serialize())
File "/home/tonyo/reps/github/werkzeug/werkzeug/contrib/securecookie.py", line 224, in serialize
self.quote(value).decode('ascii')
File "/home/tonyo/reps/github/werkzeug/werkzeug/contrib/securecookie.py", line 183, in quote
value = b''.join(base64.b64encode(value).splitlines()).strip()
File "/home/tonyo/reps/github/werkzeug/env3/lib/python3.4/base64.py", line 62, in b64encode
encoded = binascii.b2a_base64(s)[:-1]
TypeError: 'str' does not support the buffer interface
|
TypeError
|
def quote(cls, value):
"""Quote the value for the cookie. This can be any object supported
by :attr:`serialization_method`.
:param value: the value to quote.
"""
if cls.serialization_method is not None:
value = cls.serialization_method.dumps(value)
if cls.quote_base64:
value = b"".join(base64.b64encode(to_bytes(value, "utf8")).splitlines()).strip()
return value
|
def quote(cls, value):
"""Quote the value for the cookie. This can be any object supported
by :attr:`serialization_method`.
:param value: the value to quote.
"""
if cls.serialization_method is not None:
value = cls.serialization_method.dumps(value)
if cls.quote_base64:
value = b"".join(base64.b64encode(value).splitlines()).strip()
return value
|
https://github.com/pallets/werkzeug/issues/953
|
Traceback (most recent call last):
File "test_secure_cookie.py", line 10, in <module>
print(c.serialize())
File "/home/tonyo/reps/github/werkzeug/werkzeug/contrib/securecookie.py", line 224, in serialize
self.quote(value).decode('ascii')
File "/home/tonyo/reps/github/werkzeug/werkzeug/contrib/securecookie.py", line 183, in quote
value = b''.join(base64.b64encode(value).splitlines()).strip()
File "/home/tonyo/reps/github/werkzeug/env3/lib/python3.4/base64.py", line 62, in b64encode
encoded = binascii.b2a_base64(s)[:-1]
TypeError: 'str' does not support the buffer interface
|
TypeError
|
def add_arguments(self, parser):
self.instances = {}
if self.subcommands:
subparsers = parser.add_subparsers(dest=self.subcommand_dest)
for command, cls in self.subcommands.items():
instance = cls(self.stdout._out, self.stderr._out)
instance.style = self.style
parser_sub = subparsers.add_parser(
cmd=self,
name=instance.command_name,
help=instance.help_string,
description=instance.help_string,
)
add_builtin_arguments(parser=parser_sub)
instance.add_arguments(parser_sub)
self.instances[command] = instance
|
def add_arguments(self, parser):
self.instances = {}
if self.subcommands:
subparsers = parser.add_subparsers(dest=self.subcommand_dest)
for command, cls in self.subcommands.items():
instance = cls(self.stdout, self.stderr)
instance.style = self.style
parser_sub = subparsers.add_parser(
cmd=self,
name=instance.command_name,
help=instance.help_string,
description=instance.help_string,
)
add_builtin_arguments(parser=parser_sub)
instance.add_arguments(parser_sub)
self.instances[command] = instance
|
https://github.com/django-cms/django-cms/issues/5490
|
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/.../www/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line
utility.execute()
File "/.../www/lib/python2.7/site-packages/django/core/management/__init__.py", line 346, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 394, in run_from_argv
self.execute(*args, **cmd_options)
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 445, in execute
output = self.handle(*args, **options)
File "/.../www/lib/python2.7/site-packages/cms/management/commands/subcommands/base.py", line 86, in handle
command.handle(*args, **options)
File "/.../www/lib/python2.7/site-packages/cms/management/commands/subcommands/publisher_publish.py", line 47, in handle
self.stdout.write('%d.\t%s %s [%d]\n' % (index + 1, m, force_text(page), page.id))
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 114, in write
self._out.write(force_str(style_func(msg)))
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 111, in write
if ending and not msg.endswith(ending):
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 20: ordinal not in range(128)
|
UnicodeDecodeError
|
def handle(self, *args, **options):
if options[self.subcommand_dest] in self.instances:
command = self.instances[options[self.subcommand_dest]]
if options.get("no_color"):
command.style = no_style()
command.stderr.style_func = None
if options.get("stdout"):
command.stdout._out = options.get("stdout")
if options.get("stderr"):
command.stderr._out = options.get("stderr")
command.handle(*args, **options)
else:
self.print_help("manage.py", "cms")
|
def handle(self, *args, **options):
if options[self.subcommand_dest] in self.instances:
command = self.instances[options[self.subcommand_dest]]
if options.get("no_color"):
command.style = no_style()
command.stderr.style_func = None
if options.get("stdout"):
command.stdout = OutputWrapper(options["stdout"])
if options.get("stderr"):
command.stderr = OutputWrapper(
options.get("stderr"), command.stderr.style_func
)
command.handle(*args, **options)
else:
self.print_help("manage.py", "cms")
|
https://github.com/django-cms/django-cms/issues/5490
|
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/.../www/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line
utility.execute()
File "/.../www/lib/python2.7/site-packages/django/core/management/__init__.py", line 346, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 394, in run_from_argv
self.execute(*args, **cmd_options)
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 445, in execute
output = self.handle(*args, **options)
File "/.../www/lib/python2.7/site-packages/cms/management/commands/subcommands/base.py", line 86, in handle
command.handle(*args, **options)
File "/.../www/lib/python2.7/site-packages/cms/management/commands/subcommands/publisher_publish.py", line 47, in handle
self.stdout.write('%d.\t%s %s [%d]\n' % (index + 1, m, force_text(page), page.id))
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 114, in write
self._out.write(force_str(style_func(msg)))
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 111, in write
if ending and not msg.endswith(ending):
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 20: ordinal not in range(128)
|
UnicodeDecodeError
|
def handle_label(self, label, **options):
queryset = Page.objects.filter(application_urls=label)
number_of_apphooks = queryset.count()
if number_of_apphooks > 0:
if options.get("interactive"):
confirm = input(
"""
You have requested to remove %d '%s' apphooks.
Are you sure you want to do this?
Type 'yes' to continue, or 'no' to cancel: """
% (number_of_apphooks, label)
)
else:
confirm = "yes"
if confirm == "yes":
queryset.update(application_urls=None)
self.stdout.write(
"%d '%s' apphooks uninstalled\n" % (number_of_apphooks, label)
)
else:
self.stdout.write("no '%s' apphooks found\n" % label)
|
def handle_label(self, label, **options):
queryset = Page.objects.filter(application_urls=label)
number_of_apphooks = queryset.count()
if number_of_apphooks > 0:
if options.get("interactive"):
confirm = input(
"""
You have requested to remove %d %r apphooks.
Are you sure you want to do this?
Type 'yes' to continue, or 'no' to cancel: """
% (number_of_apphooks, label)
)
else:
confirm = "yes"
if confirm == "yes":
queryset.update(application_urls=None)
self.stdout.write(
"%d %r apphooks uninstalled\n" % (number_of_apphooks, label)
)
else:
self.stdout.write("no %r apphooks found\n" % label)
|
https://github.com/django-cms/django-cms/issues/5490
|
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/.../www/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line
utility.execute()
File "/.../www/lib/python2.7/site-packages/django/core/management/__init__.py", line 346, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 394, in run_from_argv
self.execute(*args, **cmd_options)
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 445, in execute
output = self.handle(*args, **options)
File "/.../www/lib/python2.7/site-packages/cms/management/commands/subcommands/base.py", line 86, in handle
command.handle(*args, **options)
File "/.../www/lib/python2.7/site-packages/cms/management/commands/subcommands/publisher_publish.py", line 47, in handle
self.stdout.write('%d.\t%s %s [%d]\n' % (index + 1, m, force_text(page), page.id))
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 114, in write
self._out.write(force_str(style_func(msg)))
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 111, in write
if ending and not msg.endswith(ending):
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 20: ordinal not in range(128)
|
UnicodeDecodeError
|
def handle_label(self, label, **options):
plugin_pool.get_all_plugins()
queryset = CMSPlugin.objects.filter(plugin_type=label)
number_of_plugins = queryset.count()
if number_of_plugins > 0:
if options.get("interactive"):
confirm = input(
"""
You have requested to remove %d '%s' plugins.
Are you sure you want to do this?
Type 'yes' to continue, or 'no' to cancel: """
% (number_of_plugins, label)
)
else:
confirm = "yes"
if confirm == "yes":
queryset.delete()
self.stdout.write(
"%d '%s' plugins uninstalled\n" % (number_of_plugins, label)
)
else:
self.stdout.write("Aborted")
else:
self.stdout.write("no '%s' plugins found\n" % label)
|
def handle_label(self, label, **options):
plugin_pool.get_all_plugins()
queryset = CMSPlugin.objects.filter(plugin_type=label)
number_of_plugins = queryset.count()
if number_of_plugins > 0:
if options.get("interactive"):
confirm = input(
"""
You have requested to remove %d %r plugins.
Are you sure you want to do this?
Type 'yes' to continue, or 'no' to cancel: """
% (number_of_plugins, label)
)
else:
confirm = "yes"
if confirm == "yes":
queryset.delete()
self.stdout.write(
"%d %r plugins uninstalled\n" % (number_of_plugins, label)
)
else:
self.stdout.write("Aborted")
else:
self.stdout.write("no %r plugins found\n" % label)
|
https://github.com/django-cms/django-cms/issues/5490
|
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/.../www/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line
utility.execute()
File "/.../www/lib/python2.7/site-packages/django/core/management/__init__.py", line 346, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 394, in run_from_argv
self.execute(*args, **cmd_options)
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 445, in execute
output = self.handle(*args, **options)
File "/.../www/lib/python2.7/site-packages/cms/management/commands/subcommands/base.py", line 86, in handle
command.handle(*args, **options)
File "/.../www/lib/python2.7/site-packages/cms/management/commands/subcommands/publisher_publish.py", line 47, in handle
self.stdout.write('%d.\t%s %s [%d]\n' % (index + 1, m, force_text(page), page.id))
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 114, in write
self._out.write(force_str(style_func(msg)))
File "/.../www/lib/python2.7/site-packages/django/core/management/base.py", line 111, in write
if ending and not msg.endswith(ending):
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 20: ordinal not in range(128)
|
UnicodeDecodeError
|
def save_model(self, request, obj, form, change):
"""
Move the page in the tree if necessary and save every placeholder
Content object.
"""
target = request.GET.get("target", None)
position = request.GET.get("position", None)
if "recover" in request.path_info:
pk = obj.pk
if obj.parent_id:
try:
parent = Page.objects.get(pk=obj.parent_id)
except Page.DoesNotExist:
parent = None
else:
parent = None
obj.pk = None
obj.path = None
obj.numchild = 0
obj.depth = 0
if parent:
saved_obj = parent.add_child(instance=obj)
else:
saved_obj = obj.add_root(instance=obj)
tmp_pk = saved_obj.pk
saved_obj.pk = pk
Page.objects.get(pk=tmp_pk).delete()
saved_obj.save(no_signals=True)
else:
if "history" in request.path_info:
old_obj = Page.objects.get(pk=obj.pk)
obj.depth = old_obj.depth
obj.parent_id = old_obj.parent_id
obj.path = old_obj.path
obj.numchild = old_obj.numchild
new = False
if not obj.pk:
new = True
obj.save()
if "recover" in request.path_info or "history" in request.path_info:
revert_plugins(request, obj.version.pk, obj)
if target is not None and position is not None:
try:
target = self.model.objects.get(pk=target)
except self.model.DoesNotExist:
pass
else:
if position == "last-child" or position == "first-child":
obj.parent_id = target.pk
else:
obj.parent_id = target.parent_id
obj.save()
obj = obj.move(target, pos=position)
page_type_id = form.cleaned_data.get("page_type")
copy_target_id = request.GET.get("copy_target")
if copy_target_id or page_type_id:
if page_type_id:
copy_target_id = page_type_id
copy_target = Page.objects.get(pk=copy_target_id)
if not copy_target.has_view_permission(request):
raise PermissionDenied()
obj = obj.reload()
copy_target._copy_attributes(obj, clean=True)
obj.save()
for lang in copy_target.languages.split(","):
copy_target._copy_contents(obj, lang)
if not "permission" in request.path_info:
language = form.cleaned_data["language"]
Title.objects.set_or_create(
request,
obj,
form,
language,
)
# is it home? publish it right away
if new and Page.objects.filter(site_id=obj.site_id).count() == 1:
obj.publish(language)
|
def save_model(self, request, obj, form, change):
"""
Move the page in the tree if necessary and save every placeholder
Content object.
"""
target = request.GET.get("target", None)
position = request.GET.get("position", None)
if "recover" in request.path_info:
pk = obj.pk
if obj.parent_id:
try:
parent = Page.objects.get(pk=obj.parent_id)
except Page.DoesNotExist:
parent = None
else:
parent = None
obj.pk = None
obj.path = None
obj.numchild = 0
obj.depth = 0
if parent:
saved_obj = parent.add_child(instance=obj)
else:
saved_obj = obj.add_root(instance=obj)
tmp_pk = saved_obj.pk
saved_obj.pk = pk
Page.objects.get(pk=tmp_pk).delete()
saved_obj.save(no_signals=True)
else:
if "history" in request.path_info:
old_obj = Page.objects.get(pk=obj.pk)
obj.depth = old_obj.depth
obj.parent_id = old_obj.parent_id
obj.path = old_obj.path
obj.numchild = old_obj.numchild
new = False
if not obj.pk:
new = True
obj.save()
if "recover" in request.path_info or "history" in request.path_info:
revert_plugins(request, obj.version.pk, obj)
if target is not None and position is not None:
try:
target = self.model.objects.get(pk=target)
except self.model.DoesNotExist:
pass
else:
if position == "last-child" or position == "first-child":
obj.parent_id = target.pk
else:
obj.parent_id = target.parent_id
obj.save()
obj = obj.move(target, pos=position)
page_type_id = form.cleaned_data.get("page_type")
copy_target_id = request.GET.get("copy_target")
if copy_target_id or page_type_id:
if page_type_id:
copy_target_id = page_type_id
copy_target = Page.objects.get(pk=copy_target_id)
if not copy_target.has_view_permission(request):
raise PermissionDenied()
obj = Page.objects.get(pk=obj.pk) # mptt reload
copy_target._copy_attributes(obj, clean=True)
obj.save()
for lang in copy_target.languages.split(","):
copy_target._copy_contents(obj, lang)
if not "permission" in request.path_info:
language = form.cleaned_data["language"]
Title.objects.set_or_create(
request,
obj,
form,
language,
)
# is it home? publish it right away
if new and Page.objects.filter(site_id=obj.site_id).count() == 1:
obj.publish(language)
|
https://github.com/django-cms/django-cms/issues/4102
|
Traceback (most recent call last):
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/django/core/handlers/base.py", line 111, in get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/django/utils/decorators.py", line 105, in _wrapped_view
response = view_func(request, *args, **kwargs)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/django/views/decorators/cache.py", line 52, in _wrapped_view_func
response = view_func(request, *args, **kwargs)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/django/contrib/admin/sites.py", line 206, in inner
return view(request, *args, **kwargs)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/django/db/transaction.py", line 394, in inner
return func(*args, **kwargs)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/cms/admin/pageadmin.py", line 848, in move_page
page.move_page(target, position)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/cms/models/pagemodel.py", line 216, in move_page
moved_page = self.move(target, pos=position)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/cms/models/pagemodel.py", line 1223, in move
super(Page, self).move(target, pos)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/treebeard/mp_tree.py", line 1037, in move
return MP_MoveHandler(self, target, pos).process()
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/treebeard/mp_tree.py", line 444, in process
if self.target.is_descendant_of(self.node):
AttributeError: 'NoneType' object has no attribute 'is_descendant_of'
|
AttributeError
|
def save(self, no_signals=False, commit=True, **kwargs):
"""
Args:
commit: True if model should be really saved
"""
# delete template cache
if hasattr(self, "_template_cache"):
delattr(self, "_template_cache")
created = not bool(self.pk)
if self.reverse_id == "":
self.reverse_id = None
if self.application_namespace == "":
self.application_namespace = None
from cms.utils.permissions import _thread_locals
user = getattr(_thread_locals, "user", None)
if user:
try:
changed_by = force_text(user)
except AttributeError:
# AnonymousUser may not have USERNAME_FIELD
changed_by = "anonymous"
else:
# limit changed_by and created_by to avoid problems with Custom User Model
if len(changed_by) > constants.PAGE_USERNAME_MAX_LENGTH:
changed_by = "{0}... (id={1})".format(
changed_by[: constants.PAGE_USERNAME_MAX_LENGTH - 15],
user.pk,
)
self.changed_by = changed_by
else:
self.changed_by = "script"
if created:
self.created_by = self.changed_by
if commit:
if not self.depth:
if self.parent_id:
self.depth = self.parent.depth + 1
self.parent.add_child(instance=self)
else:
self.add_root(instance=self)
return # add_root and add_child save as well
super(Page, self).save(**kwargs)
|
def save(self, no_signals=False, commit=True, **kwargs):
"""
Args:
commit: True if model should be really saved
"""
# delete template cache
if hasattr(self, "_template_cache"):
delattr(self, "_template_cache")
created = not bool(self.pk)
if self.reverse_id == "":
self.reverse_id = None
if self.application_namespace == "":
self.application_namespace = None
from cms.utils.permissions import _thread_locals
user = getattr(_thread_locals, "user", None)
if user:
try:
changed_by = force_text(user)
except AttributeError:
# AnonymousUser may not have USERNAME_FIELD
changed_by = "anonymous"
else:
# limit changed_by and created_by to avoid problems with Custom User Model
if len(changed_by) > constants.PAGE_USERNAME_MAX_LENGTH:
changed_by = "{0}... (id={1})".format(
changed_by[: constants.PAGE_USERNAME_MAX_LENGTH - 15],
user.pk,
)
self.changed_by = changed_by
else:
self.changed_by = "script"
if created:
self.created_by = self.changed_by
if commit:
if not self.depth:
if self.parent_id:
self.parent.add_child(instance=self)
else:
self.add_root(instance=self)
return # add_root and add_child save as well
super(Page, self).save(**kwargs)
|
https://github.com/django-cms/django-cms/issues/4102
|
Traceback (most recent call last):
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/django/core/handlers/base.py", line 111, in get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/django/utils/decorators.py", line 105, in _wrapped_view
response = view_func(request, *args, **kwargs)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/django/views/decorators/cache.py", line 52, in _wrapped_view_func
response = view_func(request, *args, **kwargs)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/django/contrib/admin/sites.py", line 206, in inner
return view(request, *args, **kwargs)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/django/db/transaction.py", line 394, in inner
return func(*args, **kwargs)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/cms/admin/pageadmin.py", line 848, in move_page
page.move_page(target, position)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/cms/models/pagemodel.py", line 216, in move_page
moved_page = self.move(target, pos=position)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/cms/models/pagemodel.py", line 1223, in move
super(Page, self).move(target, pos)
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/treebeard/mp_tree.py", line 1037, in move
return MP_MoveHandler(self, target, pos).process()
File "/home/user/.virtualenvs/myenv/local/lib/python2.7/site-packages/treebeard/mp_tree.py", line 444, in process
if self.target.is_descendant_of(self.node):
AttributeError: 'NoneType' object has no attribute 'is_descendant_of'
|
AttributeError
|
def save(self, no_signals=False, commit=True, **kwargs):
"""
Args:
commit: True if model should be really saved
"""
# delete template cache
if hasattr(self, "_template_cache"):
delattr(self, "_template_cache")
created = not bool(self.pk)
if self.reverse_id == "":
self.reverse_id = None
if self.application_namespace == "":
self.application_namespace = None
from cms.utils.permissions import _thread_locals
user = getattr(_thread_locals, "user", None)
if user:
try:
changed_by = force_text(user)
except AttributeError:
# AnonymousUser may not have USERNAME_FIELD
changed_by = "anonymous"
else:
# limit changed_by and created_by to avoid problems with Custom User Model
if len(changed_by) > constants.PAGE_USERNAME_MAX_LENGTH:
changed_by = "{0}... (id={1})".format(
changed_by[: constants.PAGE_USERNAME_MAX_LENGTH - 15],
user.pk,
)
self.changed_by = changed_by
else:
self.changed_by = "script"
if created:
self.created_by = self.changed_by
if commit:
if not self.depth:
if self.parent_id:
self.parent.add_child(instance=self)
else:
self.add_root(instance=self)
return # add_root and add_child save as well
super(Page, self).save(**kwargs)
|
def save(self, no_signals=False, commit=True, **kwargs):
"""
Args:
commit: True if model should be really saved
"""
# delete template cache
if hasattr(self, "_template_cache"):
delattr(self, "_template_cache")
created = not bool(self.pk)
if self.reverse_id == "":
self.reverse_id = None
if self.application_namespace == "":
self.application_namespace = None
from cms.utils.permissions import _thread_locals
user = getattr(_thread_locals, "user", None)
if user:
try:
self.changed_by = str(user)
except AttributeError:
# AnonymousUser may not have USERNAME_FIELD
self.changed_by = "anonymous"
else:
self.changed_by = "script"
if created:
self.created_by = self.changed_by
if commit:
if not self.depth:
if self.parent_id:
self.parent.add_child(instance=self)
else:
self.add_root(instance=self)
return # add_root and add_child save as well
super(Page, self).save(**kwargs)
|
https://github.com/django-cms/django-cms/issues/3938
|
Internal Server Error: /admin/cms/page/add/
Traceback (most recent call last):
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/db/backends/utils.py", line 65, in execute
return self.cursor.execute(sql, params)
psycopg2.DataError: value too long for type character varying(70)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/core/handlers/base.py", line 111, in get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/contrib/admin/options.py", line 583, in wrapper
return self.admin_site.admin_view(view)(*args, **kwargs)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/utils/decorators.py", line 105, in _wrapped_view
response = view_func(request, *args, **kwargs)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/views/decorators/cache.py", line 52, in _wrapped_view_func
response = view_func(request, *args, **kwargs)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/contrib/admin/sites.py", line 206, in inner
return view(request, *args, **kwargs)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/cms/admin/pageadmin.py", line 406, in add_view
return super(PageAdmin, self).add_view(request, form_url, extra_context=extra_context)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/contrib/admin/options.py", line 1453, in add_view
return self.changeform_view(request, None, form_url, extra_context)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/utils/decorators.py", line 29, in _wrapper
return bound_func(*args, **kwargs)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/utils/decorators.py", line 105, in _wrapped_view
response = view_func(request, *args, **kwargs)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/utils/decorators.py", line 25, in bound_func
return func.__get__(self, type(self))(*args2, **kwargs2)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/db/transaction.py", line 394, in inner
return func(*args, **kwargs)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/contrib/admin/options.py", line 1404, in changeform_view
self.save_model(request, new_object, form, not add)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/cms/admin/pageadmin.py", line 206, in save_model
obj.save()
File "/home/flyweb/www/venv/lib/python3.4/site-packages/cms/models/pagemodel.py", line 457, in save
super(Page, self).save(**kwargs)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/mptt/models.py", line 838, in save
super(MPTTModel, self).save(*args, **kwargs)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/db/models/base.py", line 589, in save
force_update=force_update, update_fields=update_fields)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/cms/models/pagemodel.py", line 475, in save_base
ret = super(Page, self).save_base(*args, **kwargs)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/db/models/base.py", line 617, in save_base
updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/db/models/base.py", line 698, in _save_table
result = self._do_insert(cls._base_manager, using, fields, update_pk, raw)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/db/models/base.py", line 731, in _do_insert
using=using, raw=raw)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/db/models/manager.py", line 92, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/db/models/query.py", line 921, in _insert
return query.get_compiler(using=using).execute_sql(return_id)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/db/models/sql/compiler.py", line 920, in execute_sql
cursor.execute(sql, params)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/db/backends/utils.py", line 65, in execute
return self.cursor.execute(sql, params)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/db/utils.py", line 94, in __exit__
six.reraise(dj_exc_type, dj_exc_value, traceback)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/utils/six.py", line 658, in reraise
raise value.with_traceback(tb)
File "/home/flyweb/www/venv/lib/python3.4/site-packages/django/db/backends/utils.py", line 65, in execute
return self.cursor.execute(sql, params)
django.db.utils.DataError: value too long for type character varying(70)```
|
psycopg2.DataError
|
def _show_placeholder_for_page(
context, placeholder_name, page_lookup, lang=None, site=None, cache_result=True
):
"""
Shows the content of a page with a placeholder name and given lookup
arguments in the given language.
This is useful if you want to have some more or less static content that is
shared among many pages, such as a footer.
See _get_page_by_untyped_arg() for detailed information on the allowed types
and their interpretation for the page_lookup argument.
"""
from django.core.cache import cache
validate_placeholder_name(placeholder_name)
request = context.get("request", False)
site_id = get_site_id(site)
if not request:
return {"content": ""}
if lang is None:
lang = get_language_from_request(request)
if cache_result:
base_key = _get_cache_key(
"_show_placeholder_for_page", page_lookup, lang, site_id
)
cache_key = _clean_key("%s_placeholder:%s" % (base_key, placeholder_name))
cached_value = cache.get(cache_key)
if cached_value:
restore_sekizai_context(context, cached_value["sekizai"])
return {"content": mark_safe(cached_value["content"])}
page = _get_page_by_untyped_arg(page_lookup, request, site_id)
if not page:
return {"content": ""}
try:
placeholder = page.placeholders.get(slot=placeholder_name)
except PlaceholderModel.DoesNotExist:
if settings.DEBUG:
raise
return {"content": ""}
watcher = Watcher(context)
content = render_placeholder(
placeholder, context, placeholder_name, use_cache=cache_result
)
changes = watcher.get_changes()
if cache_result:
cache.set(
cache_key,
{"content": content, "sekizai": changes},
get_cms_setting("CACHE_DURATIONS")["content"],
)
if content:
return {"content": mark_safe(content)}
return {"content": ""}
|
def _show_placeholder_for_page(
context, placeholder_name, page_lookup, lang=None, site=None, cache_result=True
):
"""
Shows the content of a page with a placeholder name and given lookup
arguments in the given language.
This is useful if you want to have some more or less static content that is
shared among many pages, such as a footer.
See _get_page_by_untyped_arg() for detailed information on the allowed types
and their interpretation for the page_lookup argument.
"""
from django.core.cache import cache
validate_placeholder_name(placeholder_name)
request = context.get("request", False)
site_id = get_site_id(site)
if not request:
return {"content": ""}
if lang is None:
lang = get_language_from_request(request)
if cache_result:
base_key = _get_cache_key(
"_show_placeholder_for_page", page_lookup, lang, site_id
)
cache_key = _clean_key("%s_placeholder:%s" % (base_key, placeholder_name))
cached_value = cache.get(cache_key)
if cached_value:
restore_sekizai_context(context, cached_value["sekizai"])
return {"content": mark_safe(cached_value["content"])}
page = _get_page_by_untyped_arg(page_lookup, request, site_id)
if not page:
return {"content": ""}
try:
placeholder = page.placeholders.get(slot=placeholder_name)
except PlaceholderModel.DoesNotExist:
if settings.DEBUG:
raise
return {"content": ""}
watcher = Watcher(context)
content = render_placeholder(placeholder, context, placeholder_name)
changes = watcher.get_changes()
if cache_result:
cache.set(
cache_key,
{"content": content, "sekizai": changes},
get_cms_setting("CACHE_DURATIONS")["content"],
)
if content:
return {"content": mark_safe(content)}
return {"content": ""}
|
https://github.com/django-cms/django-cms/issues/3843
|
Traceback (most recent call last):
File "/path/to/env/lib/python2.6/site-packages/django/core/handlers/base.py", line 137, in get_response
response = response.render()
File "/path/to/env/lib/python2.6/site-packages/django/template/response.py", line 105, in render
self.content = self.rendered_content
File "/path/to/env/lib/python2.6/site-packages/django/template/response.py", line 82, in rendered_content
content = template.render(context)
File "/path/to/env/lib/python2.6/site-packages/django/template/base.py", line 140, in render
return self._render(context)
File "/path/to/env/lib/python2.6/site-packages/django/template/base.py", line 134, in _render
... Repeated calls to render/_render removed
return self.render_tag(context, **kwargs)
File "/path/to/env/lib/python2.6/site-packages/classytags/helpers.py", line 78, in render_tag
data = self.get_context(context, **kwargs)
File "/path/to/env/lib/python2.6/site-packages/cms/templatetags/cms_tags.py", line 619, in get_context
return _show_placeholder_for_page(**self.get_kwargs(*args, **kwargs))
File "/path/to/env/lib/python2.6/site-packages/cms/templatetags/cms_tags.py", line 597, in _show_placeholder_for_page
content = render_placeholder(placeholder, context, placeholder_name)
File "/path/to/env/lib/python2.6/site-packages/cms/plugin_rendering.py", line 135, in render_placeholder
restore_sekizai_context(context, cached_value['sekizai'])
File "/path/to/env/lib/python2.6/site-packages/cms/utils/placeholder.py", line 148, in restore_sekizai_context
sekizai_namespace = sekizai_container[key]
TypeError: 'NoneType' object is unsubscriptable
|
TypeError
|
def _onGCodeLayerMessage(self, message: Arcus.PythonMessage) -> None:
try:
self._scene.gcode_dict[self._start_slice_job_build_plate].append(
message.data.decode("utf-8", "replace")
) # type: ignore #Because we generate this attribute dynamically.
except KeyError: # Can occur if the g-code has been cleared while a slice message is still arriving from the other end.
pass # Throw the message away.
|
def _onGCodeLayerMessage(self, message: Arcus.PythonMessage) -> None:
self._scene.gcode_dict[self._start_slice_job_build_plate].append(
message.data.decode("utf-8", "replace")
) # type: ignore #Because we generate this attribute dynamically.
|
https://github.com/Ultimaker/Cura/issues/6245
|
2019-08-22 21:56:47,130 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [61]: An uncaught error has occurred!
2019-08-22 21:56:47,133 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: Traceback (most recent call last):
2019-08-22 21:56:47,134 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/cura/CuraApplication.py", line 990, in event
2019-08-22 21:56:47,134 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: return super().event(event)
2019-08-22 21:56:47,134 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Qt/QtApplication.py", line 428, in event
2019-08-22 21:56:47,135 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: event._function_event.call()
2019-08-22 21:56:47,135 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Event.py", line 189, in call
2019-08-22 21:56:47,135 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: self._function(*self._args, **self._kwargs)
2019-08-22 21:56:47,136 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Signal.py", line 315, in __performEmit
2019-08-22 21:56:47,136 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: func(dest, *args, **kwargs)
2019-08-22 21:56:47,136 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Backend/Backend.py", line 191, in _onMessageReceived
2019-08-22 21:56:47,136 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: self._message_handlers[message.getTypeName()](message)
2019-08-22 21:56:47,137 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/cura/plugins/CuraEngineBackend/CuraEngineBackend.py", line 674, in _onGCodeLayerMessage
2019-08-22 21:56:47,137 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: self._scene.gcode_dict[self._start_slice_job_build_plate].append(message.data.decode("utf-8", "replace")) #type: ignore #Because we generate this attribute dynamically.
2019-08-22 21:56:47,137 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: KeyError: 0
2019-08-22 21:56:47,157 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [61]: An uncaught error has occurred!
2019-08-22 21:56:47,157 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: Traceback (most recent call last):
2019-08-22 21:56:47,158 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/cura/CuraApplication.py", line 990, in event
2019-08-22 21:56:47,158 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: return super().event(event)
2019-08-22 21:56:47,158 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Qt/QtApplication.py", line 428, in event
2019-08-22 21:56:47,159 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: event._function_event.call()
2019-08-22 21:56:47,159 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Event.py", line 189, in call
2019-08-22 21:56:47,159 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: self._function(*self._args, **self._kwargs)
2019-08-22 21:56:47,160 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Signal.py", line 315, in __performEmit
2019-08-22 21:56:47,160 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: func(dest, *args, **kwargs)
2019-08-22 21:56:47,161 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Backend/Backend.py", line 191, in _onMessageReceived
2019-08-22 21:56:47,161 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: self._message_handlers[message.getTypeName()](message)
2019-08-22 21:56:47,161 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/cura/plugins/CuraEngineBackend/CuraEngineBackend.py", line 674, in _onGCodeLayerMessage
2019-08-22 21:56:47,162 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: self._scene.gcode_dict[self._start_slice_job_build_plate].append(message.data.decode("utf-8", "replace")) #type: ignore #Because we generate this attribute dynamically.
|
KeyError
|
def _onGCodePrefixMessage(self, message: Arcus.PythonMessage) -> None:
try:
self._scene.gcode_dict[self._start_slice_job_build_plate].insert(
0, message.data.decode("utf-8", "replace")
) # type: ignore #Because we generate this attribute dynamically.
except KeyError: # Can occur if the g-code has been cleared while a slice message is still arriving from the other end.
pass # Throw the message away.
|
def _onGCodePrefixMessage(self, message: Arcus.PythonMessage) -> None:
self._scene.gcode_dict[self._start_slice_job_build_plate].insert(
0, message.data.decode("utf-8", "replace")
) # type: ignore #Because we generate this attribute dynamically.
|
https://github.com/Ultimaker/Cura/issues/6245
|
2019-08-22 21:56:47,130 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [61]: An uncaught error has occurred!
2019-08-22 21:56:47,133 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: Traceback (most recent call last):
2019-08-22 21:56:47,134 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/cura/CuraApplication.py", line 990, in event
2019-08-22 21:56:47,134 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: return super().event(event)
2019-08-22 21:56:47,134 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Qt/QtApplication.py", line 428, in event
2019-08-22 21:56:47,135 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: event._function_event.call()
2019-08-22 21:56:47,135 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Event.py", line 189, in call
2019-08-22 21:56:47,135 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: self._function(*self._args, **self._kwargs)
2019-08-22 21:56:47,136 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Signal.py", line 315, in __performEmit
2019-08-22 21:56:47,136 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: func(dest, *args, **kwargs)
2019-08-22 21:56:47,136 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Backend/Backend.py", line 191, in _onMessageReceived
2019-08-22 21:56:47,136 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: self._message_handlers[message.getTypeName()](message)
2019-08-22 21:56:47,137 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/cura/plugins/CuraEngineBackend/CuraEngineBackend.py", line 674, in _onGCodeLayerMessage
2019-08-22 21:56:47,137 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: self._scene.gcode_dict[self._start_slice_job_build_plate].append(message.data.decode("utf-8", "replace")) #type: ignore #Because we generate this attribute dynamically.
2019-08-22 21:56:47,137 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: KeyError: 0
2019-08-22 21:56:47,157 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [61]: An uncaught error has occurred!
2019-08-22 21:56:47,157 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: Traceback (most recent call last):
2019-08-22 21:56:47,158 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/cura/CuraApplication.py", line 990, in event
2019-08-22 21:56:47,158 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: return super().event(event)
2019-08-22 21:56:47,158 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Qt/QtApplication.py", line 428, in event
2019-08-22 21:56:47,159 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: event._function_event.call()
2019-08-22 21:56:47,159 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Event.py", line 189, in call
2019-08-22 21:56:47,159 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: self._function(*self._args, **self._kwargs)
2019-08-22 21:56:47,160 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Signal.py", line 315, in __performEmit
2019-08-22 21:56:47,160 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: func(dest, *args, **kwargs)
2019-08-22 21:56:47,161 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/python3/dist-packages/UM/Backend/Backend.py", line 191, in _onMessageReceived
2019-08-22 21:56:47,161 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: self._message_handlers[message.getTypeName()](message)
2019-08-22 21:56:47,161 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: File "/opt/cura/lib/cura/plugins/CuraEngineBackend/CuraEngineBackend.py", line 674, in _onGCodeLayerMessage
2019-08-22 21:56:47,162 - CRITICAL - [MainThread] cura.CrashHandler.__init__ [64]: self._scene.gcode_dict[self._start_slice_job_build_plate].append(message.data.decode("utf-8", "replace")) #type: ignore #Because we generate this attribute dynamically.
|
KeyError
|
def _onActiveExtruderChanged(self):
new_active_stack = ExtruderManager.getInstance().getActiveExtruderStack()
if not new_active_stack:
self._active_container_stack = None
return
if new_active_stack != self._active_container_stack: # Check if changed
if (
self._active_container_stack
): # Disconnect signal from old container (if any)
self._active_container_stack.propertyChanged.disconnect(
self._onPropertyChanged
)
self._active_container_stack.containersChanged.disconnect(
self._onContainersChanged
)
self._active_container_stack = new_active_stack
self._active_container_stack.propertyChanged.connect(self._onPropertyChanged)
self._active_container_stack.containersChanged.connect(
self._onContainersChanged
)
self._update() # Ensure that the settings_with_inheritance_warning list is populated.
|
def _onActiveExtruderChanged(self):
new_active_stack = ExtruderManager.getInstance().getActiveExtruderStack()
# if not new_active_stack:
# new_active_stack = self._global_container_stack
if new_active_stack != self._active_container_stack: # Check if changed
if (
self._active_container_stack
): # Disconnect signal from old container (if any)
self._active_container_stack.propertyChanged.disconnect(
self._onPropertyChanged
)
self._active_container_stack.containersChanged.disconnect(
self._onContainersChanged
)
self._active_container_stack = new_active_stack
self._active_container_stack.propertyChanged.connect(self._onPropertyChanged)
self._active_container_stack.containersChanged.connect(
self._onContainersChanged
)
self._update() # Ensure that the settings_with_inheritance_warning list is populated.
|
https://github.com/Ultimaker/Cura/issues/3570
|
Traceback (most recent call last):
File "/usr/bin/cura", line 141, in <module>
app.run()
File "/usr/lib/python3/dist-packages/cura/CuraApplication.py", line 686, in run
self._volume = BuildVolume.BuildVolume(root)
File "/usr/lib/python3/dist-packages/cura/BuildVolume.py", line 107, in __init__
Application.getInstance().getMachineManager().activeQualityChanged.connect(self._onStackChanged)
File "/usr/lib/python3/dist-packages/cura/CuraApplication.py", line 747, in getMachineManager
self._machine_manager = MachineManager.createMachineManager()
File "/usr/lib/python3/dist-packages/cura/Settings/MachineManager.py", line 1328, in createMachineManager
return MachineManager()
File "/usr/lib/python3/dist-packages/cura/Settings/MachineManager.py", line 119, in __init__
self.setActiveMachine(active_machine_id)
File "/usr/lib/python3/dist-packages/cura/Settings/MachineManager.py", line 388, in setActiveMachine
Application.getInstance().setGlobalContainerStack(containers[0])
File "/usr/lib/python3/dist-packages/UM/Application.py", line 160, in setGlobalContainerStack
self.globalContainerStackChanged.emit()
File "/usr/lib/python3/dist-packages/UM/Signal.py", line 212, in emit
self.__performEmit(*args, **kwargs)
File "/usr/lib/python3/dist-packages/UM/Signal.py", line 315, in __performEmit
func(dest, *args, **kwargs)
File "/usr/lib/python3/dist-packages/cura/BuildVolume.py", line 500, in _onStackChanged
self._updateDisallowedAreas()
File "/usr/lib/python3/dist-packages/cura/BuildVolume.py", line 607, in _updateDisallowedAreas
disallowed_border_size = self._getEdgeDisallowedSize()
File "/usr/lib/python3/dist-packages/cura/BuildVolume.py", line 966, in _getEdgeDisallowedSize
move_from_wall_radius = max(move_from_wall_radius, max(self._getSettingFromAllExtruders("infill_wipe_dist")))
ValueError: max() arg is an empty sequence
|
ValueError
|
def _settingIsOverwritingInheritance(
self, key: str, stack: ContainerStack = None
) -> bool:
has_setting_function = False
if not stack:
stack = self._active_container_stack
if not stack: # No active container stack yet!
return False
containers = []
## Check if the setting has a user state. If not, it is never overwritten.
has_user_state = stack.getProperty(key, "state") == InstanceState.User
if not has_user_state:
return False
## If a setting is not enabled, don't label it as overwritten (It's never visible anyway).
if not stack.getProperty(key, "enabled"):
return False
## Also check if the top container is not a setting function (this happens if the inheritance is restored).
if isinstance(stack.getTop().getProperty(key, "value"), SettingFunction):
return False
## Mash all containers for all the stacks together.
while stack:
containers.extend(stack.getContainers())
stack = stack.getNextStack()
has_non_function_value = False
for container in containers:
try:
value = container.getProperty(key, "value")
except AttributeError:
continue
if value is not None:
# If a setting doesn't use any keys, it won't change it's value, so treat it as if it's a fixed value
has_setting_function = isinstance(value, SettingFunction)
if has_setting_function:
for setting_key in value.getUsedSettingKeys():
if setting_key in self._active_container_stack.getAllKeys():
break # We found an actual setting. So has_setting_function can remain true
else:
# All of the setting_keys turned out to not be setting keys at all!
# This can happen due enum keys also being marked as settings.
has_setting_function = False
if has_setting_function is False:
has_non_function_value = True
continue
if has_setting_function:
break # There is a setting function somewhere, stop looking deeper.
return has_setting_function and has_non_function_value
|
def _settingIsOverwritingInheritance(
self, key: str, stack: ContainerStack = None
) -> bool:
has_setting_function = False
if not stack:
stack = self._active_container_stack
containers = []
## Check if the setting has a user state. If not, it is never overwritten.
has_user_state = stack.getProperty(key, "state") == InstanceState.User
if not has_user_state:
return False
## If a setting is not enabled, don't label it as overwritten (It's never visible anyway).
if not stack.getProperty(key, "enabled"):
return False
## Also check if the top container is not a setting function (this happens if the inheritance is restored).
if isinstance(stack.getTop().getProperty(key, "value"), SettingFunction):
return False
## Mash all containers for all the stacks together.
while stack:
containers.extend(stack.getContainers())
stack = stack.getNextStack()
has_non_function_value = False
for container in containers:
try:
value = container.getProperty(key, "value")
except AttributeError:
continue
if value is not None:
# If a setting doesn't use any keys, it won't change it's value, so treat it as if it's a fixed value
has_setting_function = isinstance(value, SettingFunction)
if has_setting_function:
for setting_key in value.getUsedSettingKeys():
if setting_key in self._active_container_stack.getAllKeys():
break # We found an actual setting. So has_setting_function can remain true
else:
# All of the setting_keys turned out to not be setting keys at all!
# This can happen due enum keys also being marked as settings.
has_setting_function = False
if has_setting_function is False:
has_non_function_value = True
continue
if has_setting_function:
break # There is a setting function somewhere, stop looking deeper.
return has_setting_function and has_non_function_value
|
https://github.com/Ultimaker/Cura/issues/3570
|
Traceback (most recent call last):
File "/usr/bin/cura", line 141, in <module>
app.run()
File "/usr/lib/python3/dist-packages/cura/CuraApplication.py", line 686, in run
self._volume = BuildVolume.BuildVolume(root)
File "/usr/lib/python3/dist-packages/cura/BuildVolume.py", line 107, in __init__
Application.getInstance().getMachineManager().activeQualityChanged.connect(self._onStackChanged)
File "/usr/lib/python3/dist-packages/cura/CuraApplication.py", line 747, in getMachineManager
self._machine_manager = MachineManager.createMachineManager()
File "/usr/lib/python3/dist-packages/cura/Settings/MachineManager.py", line 1328, in createMachineManager
return MachineManager()
File "/usr/lib/python3/dist-packages/cura/Settings/MachineManager.py", line 119, in __init__
self.setActiveMachine(active_machine_id)
File "/usr/lib/python3/dist-packages/cura/Settings/MachineManager.py", line 388, in setActiveMachine
Application.getInstance().setGlobalContainerStack(containers[0])
File "/usr/lib/python3/dist-packages/UM/Application.py", line 160, in setGlobalContainerStack
self.globalContainerStackChanged.emit()
File "/usr/lib/python3/dist-packages/UM/Signal.py", line 212, in emit
self.__performEmit(*args, **kwargs)
File "/usr/lib/python3/dist-packages/UM/Signal.py", line 315, in __performEmit
func(dest, *args, **kwargs)
File "/usr/lib/python3/dist-packages/cura/BuildVolume.py", line 500, in _onStackChanged
self._updateDisallowedAreas()
File "/usr/lib/python3/dist-packages/cura/BuildVolume.py", line 607, in _updateDisallowedAreas
disallowed_border_size = self._getEdgeDisallowedSize()
File "/usr/lib/python3/dist-packages/cura/BuildVolume.py", line 966, in _getEdgeDisallowedSize
move_from_wall_radius = max(move_from_wall_radius, max(self._getSettingFromAllExtruders("infill_wipe_dist")))
ValueError: max() arg is an empty sequence
|
ValueError
|
def fit(self, X, y):
"""Fit the model using X as training data and y as target values
Parameters
----------
X : sktime-format pandas dataframe with shape([n_cases,n_dimensions]),
or numpy ndarray with shape([n_cases,n_readings,n_dimensions])
y : {array-like, sparse matrix}
Target values of shape = [n_samples]
"""
X, y = check_X_y(X, y, enforce_univariate=False, coerce_to_numpy=True)
y = np.asarray(y)
check_classification_targets(y)
# if internal cv is desired, the relevant flag forces a grid search
# to evaluate the possible values,
# find the best, and then set this classifier's params to match
if self._cv_for_params:
grid = GridSearchCV(
estimator=KNeighborsTimeSeriesClassifier(
metric=self.metric, n_neighbors=1, algorithm="brute"
),
param_grid=self._param_matrix,
cv=LeaveOneOut(),
scoring="accuracy",
)
grid.fit(X, y)
self.metric_params = grid.best_params_["metric_params"]
if y.ndim == 1 or y.ndim == 2 and y.shape[1] == 1:
if y.ndim != 1:
warnings.warn(
"IN TS-KNN: A column-vector y was passed when a 1d array "
"was expected. Please change the shape of y to "
"(n_samples, ), for example using ravel().",
DataConversionWarning,
stacklevel=2,
)
self.outputs_2d_ = False
y = y.reshape((-1, 1))
else:
self.outputs_2d_ = True
self.classes_ = []
self._y = np.empty(y.shape, dtype=np.int)
for k in range(self._y.shape[1]):
classes, self._y[:, k] = np.unique(y[:, k], return_inverse=True)
self.classes_.append(classes)
if not self.outputs_2d_:
self.classes_ = self.classes_[0]
self._y = self._y.ravel()
if hasattr(check_array, "__wrapped__"):
temp = check_array.__wrapped__.__code__
check_array.__wrapped__.__code__ = _check_array_ts.__code__
else:
temp = check_array.__code__
check_array.__code__ = _check_array_ts.__code__
# this not fx = self._fit(X, self_y) in order to maintain backward
# compatibility with scikit learn 0.23, where _fit does not take an arg y
fx = self._fit(X)
if hasattr(check_array, "__wrapped__"):
check_array.__wrapped__.__code__ = temp
else:
check_array.__code__ = temp
self._is_fitted = True
return fx
|
def fit(self, X, y):
"""Fit the model using X as training data and y as target values
Parameters
----------
X : sktime-format pandas dataframe with shape([n_cases,n_dimensions]),
or numpy ndarray with shape([n_cases,n_readings,n_dimensions])
y : {array-like, sparse matrix}
Target values of shape = [n_samples]
"""
X, y = check_X_y(X, y, enforce_univariate=False, coerce_to_numpy=True)
y = np.asarray(y)
check_classification_targets(y)
# print(X)
# if internal cv is desired, the relevant flag forces a grid search
# to evaluate the possible values,
# find the best, and then set this classifier's params to match
if self._cv_for_params:
grid = GridSearchCV(
estimator=KNeighborsTimeSeriesClassifier(
metric=self.metric, n_neighbors=1, algorithm="brute"
),
param_grid=self._param_matrix,
cv=LeaveOneOut(),
scoring="accuracy",
)
grid.fit(X, y)
self.metric_params = grid.best_params_["metric_params"]
if y.ndim == 1 or y.ndim == 2 and y.shape[1] == 1:
if y.ndim != 1:
warnings.warn(
"IN TS-KNN: A column-vector y was passed when a 1d array "
"was expected. Please change the shape of y to "
"(n_samples, ), for example using ravel().",
DataConversionWarning,
stacklevel=2,
)
self.outputs_2d_ = False
y = y.reshape((-1, 1))
else:
self.outputs_2d_ = True
self.classes_ = []
self._y = np.empty(y.shape, dtype=np.int)
for k in range(self._y.shape[1]):
classes, self._y[:, k] = np.unique(y[:, k], return_inverse=True)
self.classes_.append(classes)
if not self.outputs_2d_:
self.classes_ = self.classes_[0]
self._y = self._y.ravel()
if hasattr(check_array, "__wrapped__"):
temp = check_array.__wrapped__.__code__
check_array.__wrapped__.__code__ = _check_array_ts.__code__
else:
temp = check_array.__code__
check_array.__code__ = _check_array_ts.__code__
fx = self._fit(X, self._y)
if hasattr(check_array, "__wrapped__"):
check_array.__wrapped__.__code__ = temp
else:
check_array.__code__ = temp
self._is_fitted = True
return fx
|
https://github.com/alan-turing-institute/sktime/issues/608
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-25-a54f834dd466> in <module>
2
3 knn = KNeighborsTimeSeriesClassifier(n_neighbors=1, metric="dtw")
----> 4 knn.fit(X_train, y_train)
5 knn.score(X_test, y_test)
6
~\Desktop\sktime\sktime\classification\distance_based\_time_series_neighbors.py in fit(self, X, y)
250 check_array.__code__ = _check_array_ts.__code__
251
--> 252 fx = self._fit(X, self._y)
253
254 if hasattr(check_array, "__wrapped__"):
TypeError: _fit() takes 2 positional arguments but 3 were given
|
TypeError
|
def fit(self, y, X=None, fh=None):
"""Fit to training data.
Parameters
----------
y : pd.Series
Target time series to which to fit the forecaster.
fh : int, list or np.array, optional (default=None)
The forecasters horizon with the steps ahead to to predict.
X : pd.DataFrame, optional (default=None)
Exogenous variables (ignored)
Returns
-------
self : returns an instance of self.
"""
y, X = check_y_X(y, X)
self._set_y_X(y, X)
self._set_fh(fh)
self._forecaster = self._instantiate_model()
self._forecaster = self._forecaster.fit(y)
self._is_fitted = True
return self
|
def fit(self, y, X=None, fh=None):
"""Fit to training data.
Parameters
----------
y : pd.Series
Target time series to which to fit the forecaster.
fh : int, list or np.array, optional (default=None)
The forecasters horizon with the steps ahead to to predict.
X : pd.DataFrame, optional (default=None)
Exogenous variables (ignored)
Returns
-------
self : returns an instance of self.
"""
y, X = check_y_X(y, X, warn_X=True)
self._set_y_X(y, X)
self._set_fh(fh)
self._forecaster = self._instantiate_model()
self._forecaster = self._forecaster.fit(y)
self._is_fitted = True
return self
|
https://github.com/alan-turing-institute/sktime/issues/540
|
ValueError Traceback (most recent call last)
<ipython-input-3-4a75cca05732> in <module>
16 MODEL_Auto_ARIMA.fit(
17 y_train=y_df["y"],
---> 18 X_train=X_df
19 )
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/sktime/forecasting/arima.py in fit(self, y_train, fh, X_train, **fit_args)
363 self._set_y_X(y_train, X_train)
364 self._set_fh(fh)
--> 365 self._forecaster.fit(y_train, exogenous=X_train, **fit_args)
366 self._is_fitted = True
367 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in fit(self, y, X, **fit_args)
218 with_intercept=self.with_intercept,
219 sarimax_kwargs=sarimax_kwargs,
--> 220 **fit_args)
221
222 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in auto_arima(y, X, start_p, d, start_q, max_p, max_d, max_q, start_P, D, start_Q, max_P, max_D, max_Q, max_order, m, seasonal, stationary, information_criterion, alpha, test, seasonal_test, stepwise, n_jobs, start_params, trend, method, maxiter, offset_test_args, seasonal_test_args, suppress_warnings, error_action, trace, random, random_state, n_fits, return_valid_fits, out_of_sample_size, scoring, scoring_args, with_intercept, sarimax_kwargs, **fit_args)
390
391 # Temporary shim until we remove `exogenous` support completely
--> 392 X, fit_args = pm_compat.get_X(X, **fit_args)
393
394 # pop out the deprecated kwargs
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/compat/pmdarima.py in get_X(X, **kwargs)
12 exog = kwargs.pop("exogenous", None)
13 if X is not None and exog is not None:
---> 14 raise ValueError("Multiple values provided for both X and exogenous")
15
16 if exog is not None:
ValueError: Multiple values provided for both X and exogenous
|
ValueError
|
def _transform(self, y, X=None):
"""Transform data using rolling window approach"""
if X is not None:
raise NotImplementedError()
y = check_y(y)
# get integer time index
cv = self._cv
# Transform target series into tabular format using
# rolling window tabularisation
x_windows = []
y_windows = []
for x_index, y_index in cv.split(y):
x_window = y.iloc[x_index]
y_window = y.iloc[y_index]
x_windows.append(x_window)
y_windows.append(y_window)
# Put into required input format for regression
X, y = self._format_windows(x_windows, y_windows)
return X, y
|
def _transform(self, y_train, X_train=None):
"""Transform data using rolling window approach"""
if X_train is not None:
raise NotImplementedError()
y_train = check_y(y_train)
# get integer time index
cv = self._cv
# Transform target series into tabular format using
# rolling window tabularisation
x_windows = []
y_windows = []
for x_index, y_index in cv.split(y_train):
x_window = y_train.iloc[x_index]
y_window = y_train.iloc[y_index]
x_windows.append(x_window)
y_windows.append(y_window)
# Put into required input format for regression
X_train, y_train = self._format_windows(x_windows, y_windows)
return X_train, y_train
|
https://github.com/alan-turing-institute/sktime/issues/540
|
ValueError Traceback (most recent call last)
<ipython-input-3-4a75cca05732> in <module>
16 MODEL_Auto_ARIMA.fit(
17 y_train=y_df["y"],
---> 18 X_train=X_df
19 )
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/sktime/forecasting/arima.py in fit(self, y_train, fh, X_train, **fit_args)
363 self._set_y_X(y_train, X_train)
364 self._set_fh(fh)
--> 365 self._forecaster.fit(y_train, exogenous=X_train, **fit_args)
366 self._is_fitted = True
367 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in fit(self, y, X, **fit_args)
218 with_intercept=self.with_intercept,
219 sarimax_kwargs=sarimax_kwargs,
--> 220 **fit_args)
221
222 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in auto_arima(y, X, start_p, d, start_q, max_p, max_d, max_q, start_P, D, start_Q, max_P, max_D, max_Q, max_order, m, seasonal, stationary, information_criterion, alpha, test, seasonal_test, stepwise, n_jobs, start_params, trend, method, maxiter, offset_test_args, seasonal_test_args, suppress_warnings, error_action, trace, random, random_state, n_fits, return_valid_fits, out_of_sample_size, scoring, scoring_args, with_intercept, sarimax_kwargs, **fit_args)
390
391 # Temporary shim until we remove `exogenous` support completely
--> 392 X, fit_args = pm_compat.get_X(X, **fit_args)
393
394 # pop out the deprecated kwargs
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/compat/pmdarima.py in get_X(X, **kwargs)
12 exog = kwargs.pop("exogenous", None)
13 if X is not None and exog is not None:
---> 14 raise ValueError("Multiple values provided for both X and exogenous")
15
16 if exog is not None:
ValueError: Multiple values provided for both X and exogenous
|
ValueError
|
def fit(self, y, X=None, fh=None):
"""Fit to training data.
Parameters
----------
y : pd.Series
Target time series to which to fit the forecaster.
fh : int, list or np.array, optional (default=None)
The forecasters horizon with the steps ahead to to predict.
X : pd.DataFrame, optional (default=None)
Exogenous variables are ignored
Returns
-------
self : returns an instance of self.
"""
# input checks
if X is not None:
raise NotImplementedError(
"Support for exogenous variables is not yet implemented"
)
# set values
self._set_y_X(y, X)
self._set_fh(fh)
self.step_length_ = check_step_length(self.step_length)
self.window_length_ = check_window_length(self.window_length)
# set up cv iterator, for recursive strategy, a single estimator
# is fit for a one-step-ahead forecasting horizon and then called
# iteratively to predict multiple steps ahead
self._cv = SlidingWindowSplitter(
fh=1,
window_length=self.window_length_,
step_length=self.step_length_,
start_with_window=True,
)
# transform data into tabular form
X_train_tab, y_train_tab = self._transform(y, X)
# fit base regressor
regressor = clone(self.regressor)
regressor.fit(X_train_tab, y_train_tab.ravel())
self.regressor_ = regressor
self._is_fitted = True
return self
|
def fit(self, y, X=None, fh=None):
"""Fit to training data.
Parameters
----------
y : pd.Series
Target time series to which to fit the forecaster.
fh : int, list or np.array, optional (default=None)
The forecasters horizon with the steps ahead to to predict.
X : pd.DataFrame, optional (default=None)
Exogenous variables are ignored
Returns
-------
self : returns an instance of self.
"""
# input checks
if X is not None:
raise NotImplementedError()
# set values
self._set_y_X(y, X)
self._set_fh(fh)
self.step_length_ = check_step_length(self.step_length)
self.window_length_ = check_window_length(self.window_length)
# set up cv iterator, for recursive strategy, a single estimator
# is fit for a one-step-ahead forecasting horizon and then called
# iteratively to predict multiple steps ahead
self._cv = SlidingWindowSplitter(
fh=1,
window_length=self.window_length_,
step_length=self.step_length_,
start_with_window=True,
)
# transform data into tabular form
X_train_tab, y_train_tab = self._transform(y, X)
# fit base regressor
regressor = clone(self.regressor)
regressor.fit(X_train_tab, y_train_tab.ravel())
self.regressor_ = regressor
self._is_fitted = True
return self
|
https://github.com/alan-turing-institute/sktime/issues/540
|
ValueError Traceback (most recent call last)
<ipython-input-3-4a75cca05732> in <module>
16 MODEL_Auto_ARIMA.fit(
17 y_train=y_df["y"],
---> 18 X_train=X_df
19 )
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/sktime/forecasting/arima.py in fit(self, y_train, fh, X_train, **fit_args)
363 self._set_y_X(y_train, X_train)
364 self._set_fh(fh)
--> 365 self._forecaster.fit(y_train, exogenous=X_train, **fit_args)
366 self._is_fitted = True
367 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in fit(self, y, X, **fit_args)
218 with_intercept=self.with_intercept,
219 sarimax_kwargs=sarimax_kwargs,
--> 220 **fit_args)
221
222 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in auto_arima(y, X, start_p, d, start_q, max_p, max_d, max_q, start_P, D, start_Q, max_P, max_D, max_Q, max_order, m, seasonal, stationary, information_criterion, alpha, test, seasonal_test, stepwise, n_jobs, start_params, trend, method, maxiter, offset_test_args, seasonal_test_args, suppress_warnings, error_action, trace, random, random_state, n_fits, return_valid_fits, out_of_sample_size, scoring, scoring_args, with_intercept, sarimax_kwargs, **fit_args)
390
391 # Temporary shim until we remove `exogenous` support completely
--> 392 X, fit_args = pm_compat.get_X(X, **fit_args)
393
394 # pop out the deprecated kwargs
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/compat/pmdarima.py in get_X(X, **kwargs)
12 exog = kwargs.pop("exogenous", None)
13 if X is not None and exog is not None:
---> 14 raise ValueError("Multiple values provided for both X and exogenous")
15
16 if exog is not None:
ValueError: Multiple values provided for both X and exogenous
|
ValueError
|
def fit(self, y, X=None, fh=None, **fit_params):
"""Fit to training data.
Parameters
----------
y : pd.Series
Target time series to which to fit the forecaster.
fh : int, list or np.array, optional (default=None)
The forecasters horizon with the steps ahead to to predict.
X : pd.DataFrame, optional (default=None)
Exogenous variables are ignored
Returns
-------
self : returns an instance of self.
"""
y, X = check_y_X(y, X)
# validate cross-validator
cv = check_cv(self.cv)
base_forecaster = clone(self.forecaster)
scoring = check_scoring(self.scoring)
scorers = {scoring.name: scoring}
refit_metric = scoring.name
fit_and_score_kwargs = dict(
scorer=scorers,
fit_params=fit_params,
return_train_score=self.return_train_score,
return_times=True,
return_parameters=False,
error_score=self.error_score,
verbose=self.verbose,
)
results = {}
all_candidate_params = []
all_out = []
def evaluate_candidates(candidate_params):
candidate_params = list(candidate_params)
n_candidates = len(candidate_params)
if self.verbose > 0:
n_splits = cv.get_n_splits(y)
print( # noqa
"Fitting {0} folds for each of {1} candidates,"
" totalling {2} fits".format(
n_splits, n_candidates, n_candidates * n_splits
)
)
out = []
for parameters in candidate_params:
r = _fit_and_score(
clone(base_forecaster),
cv,
y,
X,
parameters=parameters,
**fit_and_score_kwargs,
)
out.append(r)
n_splits = cv.get_n_splits(y)
if len(out) < 1:
raise ValueError(
"No fits were performed. "
"Was the CV iterator empty? "
"Were there no candidates?"
)
all_candidate_params.extend(candidate_params)
all_out.extend(out)
nonlocal results
results = self._format_results(all_candidate_params, scorers, all_out)
return results
self._run_search(evaluate_candidates)
self.best_index_ = results["rank_test_%s" % refit_metric].argmin()
self.best_score_ = results["mean_test_%s" % refit_metric][self.best_index_]
self.best_params_ = results["params"][self.best_index_]
self.best_forecaster_ = clone(base_forecaster).set_params(**self.best_params_)
if self.refit:
refit_start_time = time.time()
self.best_forecaster_.fit(y, X, fh)
self.refit_time_ = time.time() - refit_start_time
# Store the only scorer not as a dict for single metric evaluation
self.scorer_ = scorers[scoring.name]
self.cv_results_ = results
self.n_splits_ = cv.get_n_splits(y)
self._is_fitted = True
return self
|
def fit(self, y, X=None, fh=None, **fit_params):
"""Fit to training data.
Parameters
----------
y : pd.Series
Target time series to which to fit the forecaster.
fh : int, list or np.array, optional (default=None)
The forecasters horizon with the steps ahead to to predict.
X : pd.DataFrame, optional (default=None)
Exogenous variables are ignored
Returns
-------
self : returns an instance of self.
"""
y = check_y(y)
# validate cross-validator
cv = check_cv(self.cv)
base_forecaster = clone(self.forecaster)
scoring = check_scoring(self.scoring)
scorers = {scoring.name: scoring}
refit_metric = scoring.name
fit_and_score_kwargs = dict(
scorer=scorers,
fit_params=fit_params,
return_train_score=self.return_train_score,
return_times=True,
return_parameters=False,
error_score=self.error_score,
verbose=self.verbose,
)
results = {}
all_candidate_params = []
all_out = []
def evaluate_candidates(candidate_params):
candidate_params = list(candidate_params)
n_candidates = len(candidate_params)
if self.verbose > 0:
n_splits = cv.get_n_splits(y)
print( # noqa
"Fitting {0} folds for each of {1} candidates,"
" totalling {2} fits".format(
n_splits, n_candidates, n_candidates * n_splits
)
)
out = []
for parameters in candidate_params:
r = _fit_and_score(
clone(base_forecaster),
cv,
y,
X,
parameters=parameters,
**fit_and_score_kwargs,
)
out.append(r)
n_splits = cv.get_n_splits(y)
if len(out) < 1:
raise ValueError(
"No fits were performed. "
"Was the CV iterator empty? "
"Were there no candidates?"
)
all_candidate_params.extend(candidate_params)
all_out.extend(out)
nonlocal results
results = self._format_results(all_candidate_params, scorers, all_out)
return results
self._run_search(evaluate_candidates)
self.best_index_ = results["rank_test_%s" % refit_metric].argmin()
self.best_score_ = results["mean_test_%s" % refit_metric][self.best_index_]
self.best_params_ = results["params"][self.best_index_]
self.best_forecaster_ = clone(base_forecaster).set_params(**self.best_params_)
if self.refit:
refit_start_time = time.time()
self.best_forecaster_.fit(y, X, fh)
self.refit_time_ = time.time() - refit_start_time
# Store the only scorer not as a dict for single metric evaluation
self.scorer_ = scorers[scoring.name]
self.cv_results_ = results
self.n_splits_ = cv.get_n_splits(y)
self._is_fitted = True
return self
|
https://github.com/alan-turing-institute/sktime/issues/540
|
ValueError Traceback (most recent call last)
<ipython-input-3-4a75cca05732> in <module>
16 MODEL_Auto_ARIMA.fit(
17 y_train=y_df["y"],
---> 18 X_train=X_df
19 )
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/sktime/forecasting/arima.py in fit(self, y_train, fh, X_train, **fit_args)
363 self._set_y_X(y_train, X_train)
364 self._set_fh(fh)
--> 365 self._forecaster.fit(y_train, exogenous=X_train, **fit_args)
366 self._is_fitted = True
367 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in fit(self, y, X, **fit_args)
218 with_intercept=self.with_intercept,
219 sarimax_kwargs=sarimax_kwargs,
--> 220 **fit_args)
221
222 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in auto_arima(y, X, start_p, d, start_q, max_p, max_d, max_q, start_P, D, start_Q, max_P, max_D, max_Q, max_order, m, seasonal, stationary, information_criterion, alpha, test, seasonal_test, stepwise, n_jobs, start_params, trend, method, maxiter, offset_test_args, seasonal_test_args, suppress_warnings, error_action, trace, random, random_state, n_fits, return_valid_fits, out_of_sample_size, scoring, scoring_args, with_intercept, sarimax_kwargs, **fit_args)
390
391 # Temporary shim until we remove `exogenous` support completely
--> 392 X, fit_args = pm_compat.get_X(X, **fit_args)
393
394 # pop out the deprecated kwargs
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/compat/pmdarima.py in get_X(X, **kwargs)
12 exog = kwargs.pop("exogenous", None)
13 if X is not None and exog is not None:
---> 14 raise ValueError("Multiple values provided for both X and exogenous")
15
16 if exog is not None:
ValueError: Multiple values provided for both X and exogenous
|
ValueError
|
def fit(self, y, X=None, fh=None):
"""Fit to training data.
Parameters
----------
y : pd.Series
Target time series to which to fit the forecaster.
fh : int, list or np.array, optional (default=None)
The forecasters horizon with the steps ahead to to predict.
X : pd.DataFrame, optional (default=None)
Exogenous variables are ignored
Returns
-------
self : returns an instance of self.
"""
y, _ = check_y_X(y, X)
sp = check_sp(self.sp)
if sp > 1 and not self.deseasonalize:
warn("`sp` is ignored when `deseasonalise`=False")
if self.deseasonalize:
self.deseasonalizer_ = Deseasonalizer(sp=self.sp, model="multiplicative")
y = self.deseasonalizer_.fit_transform(y)
# fit exponential smoothing forecaster
# find theta lines: Theta lines are just SES + drift
super(ThetaForecaster, self).fit(y, fh=fh)
self.smoothing_level_ = self._fitted_forecaster.params["smoothing_level"]
# compute trend
self.trend_ = self._compute_trend(y)
self._is_fitted = True
return self
|
def fit(self, y, X=None, fh=None):
"""Fit to training data.
Parameters
----------
y : pd.Series
Target time series to which to fit the forecaster.
fh : int, list or np.array, optional (default=None)
The forecasters horizon with the steps ahead to to predict.
X : pd.DataFrame, optional (default=None)
Exogenous variables are ignored
Returns
-------
self : returns an instance of self.
"""
y = check_y(y)
sp = check_sp(self.sp)
if sp > 1 and not self.deseasonalize:
warn("`sp` is ignored when `deseasonalise`=False")
if self.deseasonalize:
self.deseasonalizer_ = Deseasonalizer(sp=self.sp, model="multiplicative")
y = self.deseasonalizer_.fit_transform(y)
# fit exponential smoothing forecaster
# find theta lines: Theta lines are just SES + drift
super(ThetaForecaster, self).fit(y, fh=fh)
self.smoothing_level_ = self._fitted_forecaster.params["smoothing_level"]
# compute trend
self.trend_ = self._compute_trend(y)
self._is_fitted = True
return self
|
https://github.com/alan-turing-institute/sktime/issues/540
|
ValueError Traceback (most recent call last)
<ipython-input-3-4a75cca05732> in <module>
16 MODEL_Auto_ARIMA.fit(
17 y_train=y_df["y"],
---> 18 X_train=X_df
19 )
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/sktime/forecasting/arima.py in fit(self, y_train, fh, X_train, **fit_args)
363 self._set_y_X(y_train, X_train)
364 self._set_fh(fh)
--> 365 self._forecaster.fit(y_train, exogenous=X_train, **fit_args)
366 self._is_fitted = True
367 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in fit(self, y, X, **fit_args)
218 with_intercept=self.with_intercept,
219 sarimax_kwargs=sarimax_kwargs,
--> 220 **fit_args)
221
222 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in auto_arima(y, X, start_p, d, start_q, max_p, max_d, max_q, start_P, D, start_Q, max_P, max_D, max_Q, max_order, m, seasonal, stationary, information_criterion, alpha, test, seasonal_test, stepwise, n_jobs, start_params, trend, method, maxiter, offset_test_args, seasonal_test_args, suppress_warnings, error_action, trace, random, random_state, n_fits, return_valid_fits, out_of_sample_size, scoring, scoring_args, with_intercept, sarimax_kwargs, **fit_args)
390
391 # Temporary shim until we remove `exogenous` support completely
--> 392 X, fit_args = pm_compat.get_X(X, **fit_args)
393
394 # pop out the deprecated kwargs
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/compat/pmdarima.py in get_X(X, **kwargs)
12 exog = kwargs.pop("exogenous", None)
13 if X is not None and exog is not None:
---> 14 raise ValueError("Multiple values provided for both X and exogenous")
15
16 if exog is not None:
ValueError: Multiple values provided for both X and exogenous
|
ValueError
|
def fit(self, y, X=None, fh=None):
"""Fit to training data.
Parameters
----------
y : pd.Series
Target time series with which to fit the forecaster.
fh : int, list or np.array, optional (default=None)
The forecast horizon with the steps ahead to predict.
X : pd.DataFrame, optional (default=None)
Exogenous variables are ignored
Returns
-------
self : returns an instance of self.
"""
if X is not None:
raise NotImplementedError(
"Support for exogenous variables is not yet implemented"
)
self._set_y_X(y, X)
self._set_fh(fh)
# for default regressor, set fit_intercept=False as we generate a
# dummy variable in polynomial features
if self.regressor is None:
regressor = LinearRegression(fit_intercept=False)
else:
regressor = self.regressor
# make pipeline with polynomial features
self.regressor_ = make_pipeline(
PolynomialFeatures(degree=self.degree, include_bias=self.with_intercept),
regressor,
)
# transform data
n_timepoints = _get_duration(self._y.index, coerce_to_int=True) + 1
X = np.arange(n_timepoints).reshape(-1, 1)
# fit regressor
self.regressor_.fit(X, y)
self._is_fitted = True
return self
|
def fit(self, y, X=None, fh=None):
"""Fit to training data.
Parameters
----------
y : pd.Series
Target time series with which to fit the forecaster.
fh : int, list or np.array, optional (default=None)
The forecast horizon with the steps ahead to predict.
X : pd.DataFrame, optional (default=None)
Exogenous variables are ignored
Returns
-------
self : returns an instance of self.
"""
if X is not None:
raise NotImplementedError("Exogeneous variables are not yet supported")
self._set_y_X(y, X)
self._set_fh(fh)
# for default regressor, set fit_intercept=False as we generate a
# dummy variable in polynomial features
if self.regressor is None:
regressor = LinearRegression(fit_intercept=False)
else:
regressor = self.regressor
# make pipeline with polynomial features
self.regressor_ = make_pipeline(
PolynomialFeatures(degree=self.degree, include_bias=self.with_intercept),
regressor,
)
# transform data
n_timepoints = _get_duration(self._y.index, coerce_to_int=True) + 1
X = np.arange(n_timepoints).reshape(-1, 1)
# fit regressor
self.regressor_.fit(X, y)
self._is_fitted = True
return self
|
https://github.com/alan-turing-institute/sktime/issues/540
|
ValueError Traceback (most recent call last)
<ipython-input-3-4a75cca05732> in <module>
16 MODEL_Auto_ARIMA.fit(
17 y_train=y_df["y"],
---> 18 X_train=X_df
19 )
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/sktime/forecasting/arima.py in fit(self, y_train, fh, X_train, **fit_args)
363 self._set_y_X(y_train, X_train)
364 self._set_fh(fh)
--> 365 self._forecaster.fit(y_train, exogenous=X_train, **fit_args)
366 self._is_fitted = True
367 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in fit(self, y, X, **fit_args)
218 with_intercept=self.with_intercept,
219 sarimax_kwargs=sarimax_kwargs,
--> 220 **fit_args)
221
222 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in auto_arima(y, X, start_p, d, start_q, max_p, max_d, max_q, start_P, D, start_Q, max_P, max_D, max_Q, max_order, m, seasonal, stationary, information_criterion, alpha, test, seasonal_test, stepwise, n_jobs, start_params, trend, method, maxiter, offset_test_args, seasonal_test_args, suppress_warnings, error_action, trace, random, random_state, n_fits, return_valid_fits, out_of_sample_size, scoring, scoring_args, with_intercept, sarimax_kwargs, **fit_args)
390
391 # Temporary shim until we remove `exogenous` support completely
--> 392 X, fit_args = pm_compat.get_X(X, **fit_args)
393
394 # pop out the deprecated kwargs
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/compat/pmdarima.py in get_X(X, **kwargs)
12 exog = kwargs.pop("exogenous", None)
13 if X is not None and exog is not None:
---> 14 raise ValueError("Multiple values provided for both X and exogenous")
15
16 if exog is not None:
ValueError: Multiple values provided for both X and exogenous
|
ValueError
|
def fit(self, Z, X=None):
"""Fit to data.
Parameters
----------
Z : pd.Series
X : pd.DataFrame
Returns
-------
self : an instance of self
"""
z = check_series(Z, enforce_univariate=True)
self._set_y_index(z)
sp = check_sp(self.sp)
# apply seasonal decomposition
self.seasonal_ = seasonal_decompose(
z,
model=self.model,
period=sp,
filt=None,
two_sided=True,
extrapolate_trend=0,
).seasonal.iloc[:sp]
self._is_fitted = True
return self
|
def fit(self, Z, X=None):
"""Fit to data.
Parameters
----------
y : pd.Series
X : pd.DataFrame
fit_params : dict
Returns
-------
self : an instance of self
"""
z = check_series(Z, enforce_univariate=True)
self._set_y_index(z)
sp = check_sp(self.sp)
# apply seasonal decomposition
self.seasonal_ = seasonal_decompose(
z,
model=self.model,
period=sp,
filt=None,
two_sided=True,
extrapolate_trend=0,
).seasonal.iloc[:sp]
self._is_fitted = True
return self
|
https://github.com/alan-turing-institute/sktime/issues/540
|
ValueError Traceback (most recent call last)
<ipython-input-3-4a75cca05732> in <module>
16 MODEL_Auto_ARIMA.fit(
17 y_train=y_df["y"],
---> 18 X_train=X_df
19 )
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/sktime/forecasting/arima.py in fit(self, y_train, fh, X_train, **fit_args)
363 self._set_y_X(y_train, X_train)
364 self._set_fh(fh)
--> 365 self._forecaster.fit(y_train, exogenous=X_train, **fit_args)
366 self._is_fitted = True
367 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in fit(self, y, X, **fit_args)
218 with_intercept=self.with_intercept,
219 sarimax_kwargs=sarimax_kwargs,
--> 220 **fit_args)
221
222 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in auto_arima(y, X, start_p, d, start_q, max_p, max_d, max_q, start_P, D, start_Q, max_P, max_D, max_Q, max_order, m, seasonal, stationary, information_criterion, alpha, test, seasonal_test, stepwise, n_jobs, start_params, trend, method, maxiter, offset_test_args, seasonal_test_args, suppress_warnings, error_action, trace, random, random_state, n_fits, return_valid_fits, out_of_sample_size, scoring, scoring_args, with_intercept, sarimax_kwargs, **fit_args)
390
391 # Temporary shim until we remove `exogenous` support completely
--> 392 X, fit_args = pm_compat.get_X(X, **fit_args)
393
394 # pop out the deprecated kwargs
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/compat/pmdarima.py in get_X(X, **kwargs)
12 exog = kwargs.pop("exogenous", None)
13 if X is not None and exog is not None:
---> 14 raise ValueError("Multiple values provided for both X and exogenous")
15
16 if exog is not None:
ValueError: Multiple values provided for both X and exogenous
|
ValueError
|
def check_y_X(y, X=None, allow_empty=False, allow_constant=True):
"""Validate input data.
Parameters
----------
y : pd.Series
X : pd.DataFrame, optional (default=None)
allow_empty : bool, optional (default=False)
If True, empty `y` does not raise an error.
allow_constant : bool, optional (default=True)
If True, constant `y` does not raise an error.
Raises
------
ValueError
If y or X are invalid inputs
"""
y = check_y(y, allow_empty=allow_empty, allow_constant=allow_constant)
if X is not None:
X = check_X(X=X)
check_equal_time_index(y, X)
return y, X
|
def check_y_X(y, X=None, allow_empty=False, allow_constant=True, warn_X=False):
"""Validate input data.
Parameters
----------
y : pd.Series
X : pd.DataFrame, optional (default=None)
allow_empty : bool, optional (default=False)
If True, empty `y` does not raise an error.
allow_constant : bool, optional (default=True)
If True, constant `y` does not raise an error.
warn_X : bool, optional (default=False)
Raises a warning if True.
Raises
------
ValueError
If y or X are invalid inputs
"""
y = check_y(y, allow_empty=allow_empty, allow_constant=allow_constant)
if X is not None:
X = check_X(X=X, warn_X=warn_X)
check_equal_time_index(y, X)
return y, X
|
https://github.com/alan-turing-institute/sktime/issues/540
|
ValueError Traceback (most recent call last)
<ipython-input-3-4a75cca05732> in <module>
16 MODEL_Auto_ARIMA.fit(
17 y_train=y_df["y"],
---> 18 X_train=X_df
19 )
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/sktime/forecasting/arima.py in fit(self, y_train, fh, X_train, **fit_args)
363 self._set_y_X(y_train, X_train)
364 self._set_fh(fh)
--> 365 self._forecaster.fit(y_train, exogenous=X_train, **fit_args)
366 self._is_fitted = True
367 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in fit(self, y, X, **fit_args)
218 with_intercept=self.with_intercept,
219 sarimax_kwargs=sarimax_kwargs,
--> 220 **fit_args)
221
222 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in auto_arima(y, X, start_p, d, start_q, max_p, max_d, max_q, start_P, D, start_Q, max_P, max_D, max_Q, max_order, m, seasonal, stationary, information_criterion, alpha, test, seasonal_test, stepwise, n_jobs, start_params, trend, method, maxiter, offset_test_args, seasonal_test_args, suppress_warnings, error_action, trace, random, random_state, n_fits, return_valid_fits, out_of_sample_size, scoring, scoring_args, with_intercept, sarimax_kwargs, **fit_args)
390
391 # Temporary shim until we remove `exogenous` support completely
--> 392 X, fit_args = pm_compat.get_X(X, **fit_args)
393
394 # pop out the deprecated kwargs
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/compat/pmdarima.py in get_X(X, **kwargs)
12 exog = kwargs.pop("exogenous", None)
13 if X is not None and exog is not None:
---> 14 raise ValueError("Multiple values provided for both X and exogenous")
15
16 if exog is not None:
ValueError: Multiple values provided for both X and exogenous
|
ValueError
|
def check_X(X, allow_empty=False, enforce_univariate=False):
"""Validate input data.
Parameters
----------
X : pd.Series, pd.DataFrame, np.ndarray
allow_empty : bool, optional (default=False)
If True, empty `y` raises an error.
enforce_univariate : bool, optional (default=False)
If True, multivariate Z will raise an error.
Returns
-------
y : pd.Series, pd.DataFrame
Validated input data.
Raises
------
ValueError, TypeError
If y is an invalid input
UserWarning
Warning that X is given and model can't use it
"""
# Check if pandas series or numpy array
return check_series(
X,
enforce_univariate=enforce_univariate,
allow_empty=allow_empty,
allow_numpy=False,
)
|
def check_X(X, allow_empty=False, enforce_univariate=False, warn_X=False):
"""Validate input data.
Parameters
----------
X : pd.Series, pd.DataFrame, np.ndarray
allow_empty : bool, optional (default=False)
If True, empty `y` raises an error.
Returns
-------
y : pd.Series, pd.DataFrame
Validated input data.
Raises
------
ValueError, TypeError
If y is an invalid input
UserWarning
Warning that X is given and model can't use it
"""
if warn_X:
warnings.warn(
"Argument X is given but can't be used by model algorithm.", UserWarning
)
# Check if pandas series or numpy array
return check_series(
X, enforce_univariate=enforce_univariate, allow_empty=allow_empty
)
|
https://github.com/alan-turing-institute/sktime/issues/540
|
ValueError Traceback (most recent call last)
<ipython-input-3-4a75cca05732> in <module>
16 MODEL_Auto_ARIMA.fit(
17 y_train=y_df["y"],
---> 18 X_train=X_df
19 )
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/sktime/forecasting/arima.py in fit(self, y_train, fh, X_train, **fit_args)
363 self._set_y_X(y_train, X_train)
364 self._set_fh(fh)
--> 365 self._forecaster.fit(y_train, exogenous=X_train, **fit_args)
366 self._is_fitted = True
367 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in fit(self, y, X, **fit_args)
218 with_intercept=self.with_intercept,
219 sarimax_kwargs=sarimax_kwargs,
--> 220 **fit_args)
221
222 return self
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/arima/auto.py in auto_arima(y, X, start_p, d, start_q, max_p, max_d, max_q, start_P, D, start_Q, max_P, max_D, max_Q, max_order, m, seasonal, stationary, information_criterion, alpha, test, seasonal_test, stepwise, n_jobs, start_params, trend, method, maxiter, offset_test_args, seasonal_test_args, suppress_warnings, error_action, trace, random, random_state, n_fits, return_valid_fits, out_of_sample_size, scoring, scoring_args, with_intercept, sarimax_kwargs, **fit_args)
390
391 # Temporary shim until we remove `exogenous` support completely
--> 392 X, fit_args = pm_compat.get_X(X, **fit_args)
393
394 # pop out the deprecated kwargs
~/miniconda3/envs/RATP37/lib/python3.7/site-packages/pmdarima/compat/pmdarima.py in get_X(X, **kwargs)
12 exog = kwargs.pop("exogenous", None)
13 if X is not None and exog is not None:
---> 14 raise ValueError("Multiple values provided for both X and exogenous")
15
16 if exog is not None:
ValueError: Multiple values provided for both X and exogenous
|
ValueError
|
def __init__(
self,
error="add",
trend=None,
damped=False,
seasonal=None,
sp=1,
initialization_method="estimated",
initial_level=None,
initial_trend=None,
initial_seasonal=None,
bounds=None,
dates=None,
freq=None,
missing="none",
start_params=None,
maxiter=1000,
full_output=True,
disp=False,
callback=None,
return_params=False,
auto=False,
information_criterion="aic",
allow_multiplicative_trend=False,
restrict=True,
additive_only=False,
n_jobs=None,
**kwargs,
):
# Model params
self.error = error
self.trend = trend
self.damped = damped
self.seasonal = seasonal
self.sp = sp
self.initialization_method = initialization_method
self.initial_level = initial_level
self.initial_trend = initial_trend
self.initial_seasonal = initial_seasonal
self.bounds = bounds
self.dates = dates
self.freq = freq
self.missing = missing
# Fit params
self.start_params = start_params
self.maxiter = maxiter
self.full_output = full_output
self.disp = disp
self.callback = callback
self.return_params = return_params
self.information_criterion = information_criterion
self.auto = auto
self.allow_multiplicative_trend = allow_multiplicative_trend
self.restrict = restrict
self.additive_only = additive_only
self.n_jobs = n_jobs
super(AutoETS, self).__init__()
|
def __init__(
self,
error="add",
trend=None,
damped=False,
seasonal=None,
sp=None,
initialization_method="estimated",
initial_level=None,
initial_trend=None,
initial_seasonal=None,
bounds=None,
dates=None,
freq=None,
missing="none",
start_params=None,
maxiter=1000,
full_output=True,
disp=False,
callback=None,
return_params=False,
auto=False,
information_criterion="aic",
allow_multiplicative_trend=False,
restrict=True,
additive_only=False,
n_jobs=None,
**kwargs,
):
# Model params
self.error = error
self.trend = trend
self.damped = damped
self.seasonal = seasonal
self.sp = sp
self.initialization_method = initialization_method
self.initial_level = initial_level
self.initial_trend = initial_trend
self.initial_seasonal = initial_seasonal
self.bounds = bounds
self.dates = dates
self.freq = freq
self.missing = missing
# Fit params
self.start_params = start_params
self.maxiter = maxiter
self.full_output = full_output
self.disp = disp
self.callback = callback
self.return_params = return_params
self.information_criterion = information_criterion
self.auto = auto
self.allow_multiplicative_trend = allow_multiplicative_trend
self.restrict = restrict
self.additive_only = additive_only
self.n_jobs = n_jobs
super(AutoETS, self).__init__()
|
https://github.com/alan-turing-institute/sktime/issues/435
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-80-03aaf6ca0732> in <module>
----> 1 hw_auto_model.fit(y_train)
~/miniconda3/lib/python3.7/site-packages/sktime/forecasting/base/_statsmodels.py in fit(self, y_train, fh, X_train)
39 self._set_y_X(y_train, X_train)
40 self._set_fh(fh)
---> 41 self._fit_forecaster(y_train, X_train=X_train)
42 self._is_fitted = True
43 return self
~/miniconda3/lib/python3.7/site-packages/sktime/forecasting/ets.py in _fit_forecaster(self, y, X_train)
287 delayed(_fit)(error, trend, seasonal, damped)
288 for error, trend, seasonal, damped in _iter(
--> 289 error_range, trend_range, seasonal_range, damped_range
290 )
291 )
~/miniconda3/lib/python3.7/site-packages/joblib/parallel.py in __call__(self, iterable)
1027 # remaining jobs.
1028 self._iterating = False
-> 1029 if self.dispatch_one_batch(iterator):
1030 self._iterating = self._original_iterator is not None
1031
~/miniconda3/lib/python3.7/site-packages/joblib/parallel.py in dispatch_one_batch(self, iterator)
845 return False
846 else:
--> 847 self._dispatch(tasks)
848 return True
849
~/miniconda3/lib/python3.7/site-packages/joblib/parallel.py in _dispatch(self, batch)
763 with self._lock:
764 job_idx = len(self._jobs)
--> 765 job = self._backend.apply_async(batch, callback=cb)
766 # A job can complete so quickly than its callback is
767 # called before we get here, causing self._jobs to
~/miniconda3/lib/python3.7/site-packages/joblib/_parallel_backends.py in apply_async(self, func, callback)
206 def apply_async(self, func, callback=None):
207 """Schedule a func to be run"""
--> 208 result = ImmediateResult(func)
209 if callback:
210 callback(result)
~/miniconda3/lib/python3.7/site-packages/joblib/_parallel_backends.py in __init__(self, batch)
570 # Don't delay the application, to avoid keeping the input
571 # arguments in memory
--> 572 self.results = batch()
573
574 def get(self):
~/miniconda3/lib/python3.7/site-packages/joblib/parallel.py in __call__(self)
251 with parallel_backend(self._backend, n_jobs=self._n_jobs):
252 return [func(*args, **kwargs)
--> 253 for func, args, kwargs in self.items]
254
255 def __reduce__(self):
~/miniconda3/lib/python3.7/site-packages/joblib/parallel.py in <listcomp>(.0)
251 with parallel_backend(self._backend, n_jobs=self._n_jobs):
252 return [func(*args, **kwargs)
--> 253 for func, args, kwargs in self.items]
254
255 def __reduce__(self):
~/miniconda3/lib/python3.7/site-packages/sktime/forecasting/ets.py in _fit(error, trend, seasonal, damped)
271 dates=self.dates,
272 freq=self.freq,
--> 273 missing=self.missing,
274 )
275 _fitted_forecaster = _forecaster.fit(
~/miniconda3/lib/python3.7/site-packages/statsmodels/tsa/exponential_smoothing/ets.py in __init__(self, endog, error, trend, damped_trend, seasonal, seasonal_periods, initialization_method, initial_level, initial_trend, initial_seasonal, bounds, dates, freq, missing)
454 )
455 if seasonal_periods is None:
--> 456 self.seasonal_periods = freq_to_period(self._index_freq)
457 if self.seasonal_periods <= 1:
458 raise ValueError("seasonal_periods must be larger than 1.")
~/miniconda3/lib/python3.7/site-packages/statsmodels/tsa/tsatools.py in freq_to_period(freq)
810 if not isinstance(freq, offsets.DateOffset):
811 freq = to_offset(freq) # go ahead and standardize
--> 812 freq = freq.rule_code.upper()
813
814 if freq == 'A' or freq.startswith(('A-', 'AS-')):
AttributeError: 'NoneType' object has no attribute 'rule_code'
|
AttributeError
|
def _fit_forecaster(self, y, X=None):
# Select model automatically
if self.auto:
# Initialise parameter ranges
error_range = ["add", "mul"]
if self.allow_multiplicative_trend:
trend_range = ["add", "mul", None]
else:
trend_range = ["add", None]
if self.sp <= 1 or self.sp is None:
seasonal_range = [None]
else:
seasonal_range = ["add", "mul", None]
damped_range = [True, False]
# Check information criterion input
if (
self.information_criterion != "aic"
and self.information_criterion != "bic"
and self.information_criterion != "aicc"
):
raise ValueError("information criterion must either be aic, bic or aicc")
# Fit model, adapted from:
# https://github.com/robjhyndman/forecast/blob/master/R/ets.R
# Initialise iterator
def _iter(error_range, trend_range, seasonal_range, damped_range):
for error, trend, seasonal, damped in product(
error_range, trend_range, seasonal_range, damped_range
):
if trend is None and damped:
continue
if self.restrict:
if error == "add" and (trend == "mul" or seasonal == "mul"):
continue
if error == "mul" and trend == "mul" and seasonal == "add":
continue
if self.additive_only and (
error == "mul" or trend == "mul" or seasonal == "mul"
):
continue
yield error, trend, seasonal, damped
# Fit function
def _fit(error, trend, seasonal, damped):
_forecaster = _ETSModel(
y,
error=error,
trend=trend,
damped_trend=damped,
seasonal=seasonal,
seasonal_periods=self.sp,
initialization_method=self.initialization_method,
initial_level=self.initial_level,
initial_trend=self.initial_trend,
initial_seasonal=self.initial_seasonal,
bounds=self.bounds,
dates=self.dates,
freq=self.freq,
missing=self.missing,
)
_fitted_forecaster = _forecaster.fit(
start_params=self.start_params,
maxiter=self.maxiter,
full_output=self.full_output,
disp=self.disp,
callback=self.callback,
return_params=self.return_params,
)
return _forecaster, _fitted_forecaster
# Fit models
_fitted_results = Parallel(n_jobs=self.n_jobs)(
delayed(_fit)(error, trend, seasonal, damped)
for error, trend, seasonal, damped in _iter(
error_range, trend_range, seasonal_range, damped_range
)
)
# Select best model based on information criterion
# Get index of best model
_index = np.argmin(
[
getattr(result[1], self.information_criterion)
for result in _fitted_results
]
)
# Update best model
self._forecaster = _fitted_results[_index][0]
self._fitted_forecaster = _fitted_results[_index][1]
else:
self._forecaster = _ETSModel(
y,
error=self.error,
trend=self.trend,
damped_trend=self.damped,
seasonal=self.seasonal,
seasonal_periods=self.sp,
initialization_method=self.initialization_method,
initial_level=self.initial_level,
initial_trend=self.initial_trend,
initial_seasonal=self.initial_seasonal,
bounds=self.bounds,
dates=self.dates,
freq=self.freq,
missing=self.missing,
)
self._fitted_forecaster = self._forecaster.fit(
start_params=self.start_params,
maxiter=self.maxiter,
full_output=self.full_output,
disp=self.disp,
callback=self.callback,
return_params=self.return_params,
)
|
def _fit_forecaster(self, y, X=None):
# Select model automatically
if self.auto:
# Initialise parameter ranges
error_range = ["add", "mul"]
if self.allow_multiplicative_trend:
trend_range = ["add", "mul", None]
else:
trend_range = ["add", None]
seasonal_range = ["add", "mul", None]
damped_range = [True, False]
# Check information criterion input
if (
self.information_criterion != "aic"
and self.information_criterion != "bic"
and self.information_criterion != "aicc"
):
raise ValueError("information criterion must either be aic, bic or aicc")
# Fit model, adapted from:
# https://github.com/robjhyndman/forecast/blob/master/R/ets.R
# Initialise iterator
def _iter(error_range, trend_range, seasonal_range, damped_range):
for error, trend, seasonal, damped in product(
error_range, trend_range, seasonal_range, damped_range
):
if trend is None and damped:
continue
if self.restrict:
if error == "add" and (trend == "mul" or seasonal == "mul"):
continue
if error == "mul" and trend == "mul" and seasonal == "add":
continue
if self.additive_only and (
error == "mul" or trend == "mul" or seasonal == "mul"
):
continue
yield error, trend, seasonal, damped
# Fit function
def _fit(error, trend, seasonal, damped):
_forecaster = _ETSModel(
y,
error=error,
trend=trend,
damped_trend=damped,
seasonal=seasonal,
seasonal_periods=self.sp,
initialization_method=self.initialization_method,
initial_level=self.initial_level,
initial_trend=self.initial_trend,
initial_seasonal=self.initial_seasonal,
bounds=self.bounds,
dates=self.dates,
freq=self.freq,
missing=self.missing,
)
_fitted_forecaster = _forecaster.fit(
start_params=self.start_params,
maxiter=self.maxiter,
full_output=self.full_output,
disp=self.disp,
callback=self.callback,
return_params=self.return_params,
)
return _forecaster, _fitted_forecaster
# Fit models
_fitted_results = Parallel(n_jobs=self.n_jobs)(
delayed(_fit)(error, trend, seasonal, damped)
for error, trend, seasonal, damped in _iter(
error_range, trend_range, seasonal_range, damped_range
)
)
# Select best model based on information criterion
# Get index of best model
_index = np.argmin(
[
getattr(result[1], self.information_criterion)
for result in _fitted_results
]
)
# Update best model
self._forecaster = _fitted_results[_index][0]
self._fitted_forecaster = _fitted_results[_index][1]
else:
self._forecaster = _ETSModel(
y,
error=self.error,
trend=self.trend,
damped_trend=self.damped,
seasonal=self.seasonal,
seasonal_periods=self.sp,
initialization_method=self.initialization_method,
initial_level=self.initial_level,
initial_trend=self.initial_trend,
initial_seasonal=self.initial_seasonal,
bounds=self.bounds,
dates=self.dates,
freq=self.freq,
missing=self.missing,
)
self._fitted_forecaster = self._forecaster.fit(
start_params=self.start_params,
maxiter=self.maxiter,
full_output=self.full_output,
disp=self.disp,
callback=self.callback,
return_params=self.return_params,
)
|
https://github.com/alan-turing-institute/sktime/issues/435
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-80-03aaf6ca0732> in <module>
----> 1 hw_auto_model.fit(y_train)
~/miniconda3/lib/python3.7/site-packages/sktime/forecasting/base/_statsmodels.py in fit(self, y_train, fh, X_train)
39 self._set_y_X(y_train, X_train)
40 self._set_fh(fh)
---> 41 self._fit_forecaster(y_train, X_train=X_train)
42 self._is_fitted = True
43 return self
~/miniconda3/lib/python3.7/site-packages/sktime/forecasting/ets.py in _fit_forecaster(self, y, X_train)
287 delayed(_fit)(error, trend, seasonal, damped)
288 for error, trend, seasonal, damped in _iter(
--> 289 error_range, trend_range, seasonal_range, damped_range
290 )
291 )
~/miniconda3/lib/python3.7/site-packages/joblib/parallel.py in __call__(self, iterable)
1027 # remaining jobs.
1028 self._iterating = False
-> 1029 if self.dispatch_one_batch(iterator):
1030 self._iterating = self._original_iterator is not None
1031
~/miniconda3/lib/python3.7/site-packages/joblib/parallel.py in dispatch_one_batch(self, iterator)
845 return False
846 else:
--> 847 self._dispatch(tasks)
848 return True
849
~/miniconda3/lib/python3.7/site-packages/joblib/parallel.py in _dispatch(self, batch)
763 with self._lock:
764 job_idx = len(self._jobs)
--> 765 job = self._backend.apply_async(batch, callback=cb)
766 # A job can complete so quickly than its callback is
767 # called before we get here, causing self._jobs to
~/miniconda3/lib/python3.7/site-packages/joblib/_parallel_backends.py in apply_async(self, func, callback)
206 def apply_async(self, func, callback=None):
207 """Schedule a func to be run"""
--> 208 result = ImmediateResult(func)
209 if callback:
210 callback(result)
~/miniconda3/lib/python3.7/site-packages/joblib/_parallel_backends.py in __init__(self, batch)
570 # Don't delay the application, to avoid keeping the input
571 # arguments in memory
--> 572 self.results = batch()
573
574 def get(self):
~/miniconda3/lib/python3.7/site-packages/joblib/parallel.py in __call__(self)
251 with parallel_backend(self._backend, n_jobs=self._n_jobs):
252 return [func(*args, **kwargs)
--> 253 for func, args, kwargs in self.items]
254
255 def __reduce__(self):
~/miniconda3/lib/python3.7/site-packages/joblib/parallel.py in <listcomp>(.0)
251 with parallel_backend(self._backend, n_jobs=self._n_jobs):
252 return [func(*args, **kwargs)
--> 253 for func, args, kwargs in self.items]
254
255 def __reduce__(self):
~/miniconda3/lib/python3.7/site-packages/sktime/forecasting/ets.py in _fit(error, trend, seasonal, damped)
271 dates=self.dates,
272 freq=self.freq,
--> 273 missing=self.missing,
274 )
275 _fitted_forecaster = _forecaster.fit(
~/miniconda3/lib/python3.7/site-packages/statsmodels/tsa/exponential_smoothing/ets.py in __init__(self, endog, error, trend, damped_trend, seasonal, seasonal_periods, initialization_method, initial_level, initial_trend, initial_seasonal, bounds, dates, freq, missing)
454 )
455 if seasonal_periods is None:
--> 456 self.seasonal_periods = freq_to_period(self._index_freq)
457 if self.seasonal_periods <= 1:
458 raise ValueError("seasonal_periods must be larger than 1.")
~/miniconda3/lib/python3.7/site-packages/statsmodels/tsa/tsatools.py in freq_to_period(freq)
810 if not isinstance(freq, offsets.DateOffset):
811 freq = to_offset(freq) # go ahead and standardize
--> 812 freq = freq.rule_code.upper()
813
814 if freq == 'A' or freq.startswith(('A-', 'AS-')):
AttributeError: 'NoneType' object has no attribute 'rule_code'
|
AttributeError
|
def predict_proba(self, X):
"""Predict class probabilities for X.
The predicted class probabilities of an input sample are computed as
the mean predicted class probabilities of the trees in the forest. The
class probability of a single tree is the fraction of samples of the same
class in a leaf.
Parameters
----------
X : array-like or sparse matrix of shape = [n_samples, n_features]
The input samples. Internally, its dtype will be converted to
``dtype=np.float32``. If a sparse matrix is provided, it will be
converted into a sparse ``csr_matrix``.
Returns
-------
p : array of shape = [n_samples, n_classes], or a list of n_outputs
such arrays if n_outputs > 1.
The class probabilities of the input samples. The order of the
classes corresponds to that in the attribute `classes_`.
"""
check_is_fitted(self, "estimators_")
# Check data
if self.check_input:
X = self._validate_X_predict(X)
# Assign chunk of trees to jobs
n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs)
all_proba = Parallel(n_jobs=n_jobs, verbose=self.verbose)(
delayed(e.predict_proba)(X) for e in self.estimators_
)
all_proba = np.sum(all_proba, axis=0) / len(self.estimators_)
return all_proba
|
def predict_proba(self, X):
"""Predict class probabilities for X.
The predicted class probabilities of an input sample are computed as
the mean predicted class probabilities of the trees in the forest. The
class probability of a single tree is the fraction of samples of the same
class in a leaf.
Parameters
----------
X : array-like or sparse matrix of shape = [n_samples, n_features]
The input samples. Internally, its dtype will be converted to
``dtype=np.float32``. If a sparse matrix is provided, it will be
converted into a sparse ``csr_matrix``.
Returns
-------
p : array of shape = [n_samples, n_classes], or a list of n_outputs
such arrays if n_outputs > 1.
The class probabilities of the input samples. The order of the
classes corresponds to that in the attribute `classes_`.
"""
check_is_fitted(self, "estimators_")
# Check data
if self.check_input:
X = self._validate_X_predict(X)
# Assign chunk of trees to jobs
n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs)
all_proba = Parallel(n_jobs=n_jobs, verbose=self.verbose)(
delayed(e.predict_proba)(X) for e in self.estimators_
)
all_proba = np.sum(all_proba, axis=0) / len(self.estimators_)
if len(all_proba) == 1:
return all_proba[0]
else:
return all_proba
|
https://github.com/alan-turing-institute/sktime/issues/86
|
---------------------------------------------------------------------------
AxisError Traceback (most recent call last)
<ipython-input-56-a8b067ca3dcd> in <module>
----> 1 clf.predict(X_test.iloc[0:1, :])
~/.conda/envs/py3/lib/python3.7/site-packages/sklearn/utils/metaestimators.py in <lambda>(*args, **kwargs)
114
115 # lambda, but not partial, allows help() to work with update_wrapper
--> 116 out = lambda *args, **kwargs: self.fn(obj, *args, **kwargs)
117 # update the docstring of the returned function
118 update_wrapper(out, self.fn)
~/.conda/envs/py3/lib/python3.7/site-packages/sklearn/pipeline.py in predict(self, X, **predict_params)
420 for _, name, transform in self._iter(with_final=False):
421 Xt = transform.transform(Xt)
--> 422 return self.steps[-1][-1].predict(Xt, **predict_params)
423
424 @if_delegate_has_method(delegate='_final_estimator')
~/.conda/envs/py3/lib/python3.7/site-packages/sklearn/ensemble/forest.py in predict(self, X)
546
547 if self.n_outputs_ == 1:
--> 548 return self.classes_.take(np.argmax(proba, axis=1), axis=0)
549
550 else:
~/.conda/envs/py3/lib/python3.7/site-packages/numpy/core/fromnumeric.py in argmax(a, axis, out)
1101
1102 """
-> 1103 return _wrapfunc(a, 'argmax', axis=axis, out=out)
1104
1105
~/.conda/envs/py3/lib/python3.7/site-packages/numpy/core/fromnumeric.py in _wrapfunc(obj, method, *args, **kwds)
54 def _wrapfunc(obj, method, *args, **kwds):
55 try:
---> 56 return getattr(obj, method)(*args, **kwds)
57
58 # An AttributeError occurs if the object does not have
AxisError: axis 1 is out of bounds for array of dimension 1
|
AxisError
|
def build(self, parent_step=None, force_sequence=None):
"""Build a factory instance."""
# TODO: Handle "batch build" natively
pre, post = parse_declarations(
self.extras,
base_pre=self.factory_meta.pre_declarations,
base_post=self.factory_meta.post_declarations,
)
if force_sequence is not None:
sequence = force_sequence
elif self.force_init_sequence is not None:
sequence = self.force_init_sequence
else:
sequence = self.factory_meta.next_sequence()
step = BuildStep(
builder=self,
sequence=sequence,
parent_step=parent_step,
)
step.resolve(pre)
args, kwargs = self.factory_meta.prepare_arguments(step.attributes)
instance = self.factory_meta.instantiate(
step=step,
args=args,
kwargs=kwargs,
)
postgen_results = {}
for declaration_name in post.sorted():
declaration = post[declaration_name]
unrolled_context = declaration.declaration.unroll_context(
instance=instance,
step=step,
context=declaration.context,
)
postgen_context = PostGenerationContext(
value_provided="" in unrolled_context,
value=unrolled_context.get(""),
extra={k: v for k, v in unrolled_context.items() if k != ""},
)
postgen_results[declaration_name] = declaration.declaration.call(
instance=instance,
step=step,
context=postgen_context,
)
self.factory_meta.use_postgeneration_results(
instance=instance,
step=step,
results=postgen_results,
)
return instance
|
def build(self, parent_step=None, force_sequence=None):
"""Build a factory instance."""
# TODO: Handle "batch build" natively
pre, post = parse_declarations(
self.extras,
base_pre=self.factory_meta.pre_declarations,
base_post=self.factory_meta.post_declarations,
)
if force_sequence is not None:
sequence = force_sequence
elif self.force_init_sequence is not None:
sequence = self.force_init_sequence
else:
sequence = self.factory_meta.next_sequence()
step = BuildStep(
builder=self,
sequence=sequence,
parent_step=parent_step,
)
step.resolve(pre)
args, kwargs = self.factory_meta.prepare_arguments(step.attributes)
instance = self.factory_meta.instantiate(
step=step,
args=args,
kwargs=kwargs,
)
postgen_results = {}
for declaration_name in post.sorted():
declaration = post[declaration_name]
postgen_context = PostGenerationContext(
value_provided="" in declaration.context,
value=declaration.context.get(""),
extra={k: v for k, v in declaration.context.items() if k != ""},
)
postgen_results[declaration_name] = declaration.declaration.call(
instance=instance,
step=step,
context=postgen_context,
)
self.factory_meta.use_postgeneration_results(
instance=instance,
step=step,
results=postgen_results,
)
return instance
|
https://github.com/FactoryBoy/factory_boy/issues/466
|
Traceback (most recent call last):
File "/root/docs/notes/gist/factoryboy_maybe_broke_postgen.py", line 55, in <module>
value = CopyTargetFactory()
File "/root/.pyenv/versions/frontend/lib/python2.7/site-packages/factory/base.py", line 46, in __call__
return cls.create(**kwargs)
File "/root/.pyenv/versions/frontend/lib/python2.7/site-packages/factory/base.py", line 563, in create
return cls._generate(enums.CREATE_STRATEGY, kwargs)
File "/root/.pyenv/versions/frontend/lib/python2.7/site-packages/factory/base.py", line 500, in _generate
return step.build()
File "/root/.pyenv/versions/frontend/lib/python2.7/site-packages/factory/builder.py", line 293, in build
context=postgen_context,
File "/root/.pyenv/versions/frontend/lib/python2.7/site-packages/factory/declarations.py", line 472, in call
context=context,
File "/root/.pyenv/versions/frontend/lib/python2.7/site-packages/factory/declarations.py", line 610, in call
instance, create, context.value, **context.extra)
File "/root/docs/notes/gist/factoryboy_maybe_broke_postgen.py", line 50, in plain_m2m
for _ in xrange(num)))
TypeError: an integer is required
|
TypeError
|
def __getattr__(self, name):
"""Retrieve an attribute's value.
This will compute it if needed, unless it is already on the list of
attributes being computed.
"""
if name in self.__pending:
raise errors.CyclicDefinitionError(
"Cyclic lazy attribute definition for %r; cycle found in %r."
% (name, self.__pending)
)
elif name in self.__values:
return self.__values[name]
elif name in self.__declarations:
declaration = self.__declarations[name]
value = declaration.declaration
if enums.get_builder_phase(value) == enums.BuilderPhase.ATTRIBUTE_RESOLUTION:
self.__pending.append(name)
try:
context = value.unroll_context(
instance=self,
step=self.__step,
context=declaration.context,
)
value = value.evaluate(
instance=self,
step=self.__step,
extra=context,
)
finally:
last = self.__pending.pop()
assert name == last
self.__values[name] = value
return value
else:
raise AttributeError(
"The parameter %r is unknown. Evaluated attributes are %r, "
"definitions are %r." % (name, self.__values, self.__declarations)
)
|
def __getattr__(self, name):
"""Retrieve an attribute's value.
This will compute it if needed, unless it is already on the list of
attributes being computed.
"""
if name in self.__pending:
raise errors.CyclicDefinitionError(
"Cyclic lazy attribute definition for %r; cycle found in %r."
% (name, self.__pending)
)
elif name in self.__values:
return self.__values[name]
elif name in self.__declarations:
declaration = self.__declarations[name]
value = declaration.declaration
if enums.get_builder_phase(value) == enums.BuilderPhase.ATTRIBUTE_RESOLUTION:
self.__pending.append(name)
try:
value = value.evaluate(
instance=self,
step=self.__step,
extra=declaration.context,
)
finally:
last = self.__pending.pop()
assert name == last
self.__values[name] = value
return value
else:
raise AttributeError(
"The parameter %r is unknown. Evaluated attributes are %r, "
"definitions are %r." % (name, self.__values, self.__declarations)
)
|
https://github.com/FactoryBoy/factory_boy/issues/466
|
Traceback (most recent call last):
File "/root/docs/notes/gist/factoryboy_maybe_broke_postgen.py", line 55, in <module>
value = CopyTargetFactory()
File "/root/.pyenv/versions/frontend/lib/python2.7/site-packages/factory/base.py", line 46, in __call__
return cls.create(**kwargs)
File "/root/.pyenv/versions/frontend/lib/python2.7/site-packages/factory/base.py", line 563, in create
return cls._generate(enums.CREATE_STRATEGY, kwargs)
File "/root/.pyenv/versions/frontend/lib/python2.7/site-packages/factory/base.py", line 500, in _generate
return step.build()
File "/root/.pyenv/versions/frontend/lib/python2.7/site-packages/factory/builder.py", line 293, in build
context=postgen_context,
File "/root/.pyenv/versions/frontend/lib/python2.7/site-packages/factory/declarations.py", line 472, in call
context=context,
File "/root/.pyenv/versions/frontend/lib/python2.7/site-packages/factory/declarations.py", line 610, in call
instance, create, context.value, **context.extra)
File "/root/docs/notes/gist/factoryboy_maybe_broke_postgen.py", line 50, in plain_m2m
for _ in xrange(num)))
TypeError: an integer is required
|
TypeError
|
def get_version(package_name):
version_re = re.compile(r"^__version__ = [\"']([\w_.-]+)[\"']$")
package_components = package_name.split(".")
init_path = os.path.join(root_dir, *(package_components + ["__init__.py"]))
with codecs.open(init_path, "r", "utf-8") as f:
for line in f:
match = version_re.match(line[:-1])
if match:
return match.groups()[0]
return "0.1.0"
|
def get_version(package_name):
version_re = re.compile(r"^__version__ = [\"']([\w_.-]+)[\"']$")
package_components = package_name.split(".")
path_components = package_components + ["__init__.py"]
with open(os.path.join(root_dir, *path_components)) as f:
for line in f:
match = version_re.match(line[:-1])
if match:
return match.groups()[0]
return "0.1.0"
|
https://github.com/FactoryBoy/factory_boy/issues/118
|
(.env)# pip install factory_boy
Downloading/unpacking factory-boy
Downloading factory_boy-2.2.1.tar.gz (71kB): 71kB downloaded
Running setup.py egg_info for package factory-boy
Traceback (most recent call last):
File "<string>", line 16, in <module>
File "/home/cdp/.env/lib/python3.3/encodings/ascii.py", line 26, in decode
return codecs.ascii_decode(input, self.errors)[0]
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 1018: ordinal not in range(128)
Complete output from command python setup.py egg_info:
Traceback (most recent call last):
File "<string>", line 16, in <module>
File "/home/cdp/.env/lib/python3.3/encodings/ascii.py", line 26, in decode
return codecs.ascii_decode(input, self.errors)[0]
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 1018: ordinal not in range(128)
----------------------------------------
Cleaning up...
Command python setup.py egg_info failed with error code 1 in /home/cdp/.env/build/factory-boy
Storing complete log in /root/.pip/pip.log
|
UnicodeDecodeError
|
def __init__(self, **kwargs):
super().__init__(**kwargs)
# add events for each field
self._events.source = self
self._events.add(**dict.fromkeys(self.__fields__))
|
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# add events for each field
self._events.source = self
self._events.add(**dict.fromkeys(self.__fields__))
|
https://github.com/napari/napari/issues/2264
|
In [1]: from pydantic import BaseModel
In [2]: import inspect
In [3]: class T(BaseModel):
...: x: int
...: y: str = 'sadf'
...:
# so far so good
In [4]: inspect.signature(T)
Out[4]: <Signature (*, x: int, y: str = 'sadf') -> None>
In [5]: import PySide2
In [6]: inspect.signature(T)
Out[6]: <Signature (**data: typing.Any) -> None> # ??
# let's try that again
In [7]: class T(BaseModel):
...: x: int
...: y: str = 'sadf'
...:
In [8]: inspect.signature(T) # ???
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-8-e75fbef4a311> in <module>
----> 1 inspect.signature(T)
~/miniconda3/envs/napdev/lib/python3.8/inspect.py in signature(obj, follow_wrapped)
3091 def signature(obj, *, follow_wrapped=True):
3092 """Get a signature object for the passed callable."""
-> 3093 return Signature.from_callable(obj, follow_wrapped=follow_wrapped)
3094
3095
~/miniconda3/envs/napdev/lib/python3.8/inspect.py in from_callable(cls, obj, follow_wrapped)
2840 def from_callable(cls, obj, *, follow_wrapped=True):
2841 """Constructs Signature for the given callable object."""
-> 2842 return _signature_from_callable(obj, sigcls=cls,
2843 follow_wrapper_chains=follow_wrapped)
2844
~/miniconda3/envs/napdev/lib/python3.8/inspect.py in _signature_from_callable(obj, follow_wrapper_chains, skip_bound_arg, sigcls)
2250 if sig is not None:
2251 if not isinstance(sig, Signature):
-> 2252 raise TypeError(
2253 'unexpected object {!r} in __signature__ '
2254 'attribute'.format(sig))
TypeError: unexpected object <pydantic.utils.ClassAttribute object at 0x7fe9b925d4f0> in __signature__ attribute
|
TypeError
|
def __new__(mcs, name, bases, namespace, **kwargs):
with no_class_attributes():
cls = super().__new__(mcs, name, bases, namespace, **kwargs)
cls.__eq_operators__ = {
n: pick_equality_operator(f.type_) for n, f in cls.__fields__.items()
}
return cls
|
def __new__(mcs, name, bases, namespace, **kwargs):
cls = super().__new__(mcs, name, bases, namespace, **kwargs)
cls.__eq_operators__ = {
n: pick_equality_operator(f.type_) for n, f in cls.__fields__.items()
}
return cls
|
https://github.com/napari/napari/issues/2264
|
In [1]: from pydantic import BaseModel
In [2]: import inspect
In [3]: class T(BaseModel):
...: x: int
...: y: str = 'sadf'
...:
# so far so good
In [4]: inspect.signature(T)
Out[4]: <Signature (*, x: int, y: str = 'sadf') -> None>
In [5]: import PySide2
In [6]: inspect.signature(T)
Out[6]: <Signature (**data: typing.Any) -> None> # ??
# let's try that again
In [7]: class T(BaseModel):
...: x: int
...: y: str = 'sadf'
...:
In [8]: inspect.signature(T) # ???
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-8-e75fbef4a311> in <module>
----> 1 inspect.signature(T)
~/miniconda3/envs/napdev/lib/python3.8/inspect.py in signature(obj, follow_wrapped)
3091 def signature(obj, *, follow_wrapped=True):
3092 """Get a signature object for the passed callable."""
-> 3093 return Signature.from_callable(obj, follow_wrapped=follow_wrapped)
3094
3095
~/miniconda3/envs/napdev/lib/python3.8/inspect.py in from_callable(cls, obj, follow_wrapped)
2840 def from_callable(cls, obj, *, follow_wrapped=True):
2841 """Constructs Signature for the given callable object."""
-> 2842 return _signature_from_callable(obj, sigcls=cls,
2843 follow_wrapper_chains=follow_wrapped)
2844
~/miniconda3/envs/napdev/lib/python3.8/inspect.py in _signature_from_callable(obj, follow_wrapper_chains, skip_bound_arg, sigcls)
2250 if sig is not None:
2251 if not isinstance(sig, Signature):
-> 2252 raise TypeError(
2253 'unexpected object {!r} in __signature__ '
2254 'attribute'.format(sig))
TypeError: unexpected object <pydantic.utils.ClassAttribute object at 0x7fe9b925d4f0> in __signature__ attribute
|
TypeError
|
def bundle():
clean()
if MACOS:
patch_dmgbuild()
# smoke test, and build resources
subprocess.check_call([sys.executable, "-m", APP, "--info"])
with patched_toml():
# create
cmd = ["briefcase", "create"] + (["--no-docker"] if LINUX else [])
subprocess.check_call(cmd)
time.sleep(0.5)
add_site_packages_to_path()
if WINDOWS:
patch_wxs()
# build
cmd = ["briefcase", "build"] + (["--no-docker"] if LINUX else [])
subprocess.check_call(cmd)
# package
cmd = ["briefcase", "package"]
cmd += ["--no-sign"] if MACOS else (["--no-docker"] if LINUX else [])
subprocess.check_call(cmd)
# compress
dest = make_zip()
clean()
return dest
|
def bundle():
clean()
if MACOS:
patch_dmgbuild()
# smoke test, and build resources
subprocess.check_call([sys.executable, "-m", APP, "--info"])
patch_toml()
# create
cmd = ["briefcase", "create"] + (["--no-docker"] if LINUX else [])
subprocess.check_call(cmd)
time.sleep(0.5)
add_site_packages_to_path()
if WINDOWS:
patch_wxs()
# build
cmd = ["briefcase", "build"] + (["--no-docker"] if LINUX else [])
subprocess.check_call(cmd)
# package
cmd = ["briefcase", "package"]
cmd += ["--no-sign"] if MACOS else (["--no-docker"] if LINUX else [])
subprocess.check_call(cmd)
# compress
dest = make_zip()
clean()
with open(PYPROJECT_TOML, "w") as f:
f.write(original_toml)
return dest
|
https://github.com/napari/napari/issues/2022
|
C:\Users\rober>C:\Users\rober\AppData\Local\Programs\napari\python\python.exe -m napari
** On entry to DGEBAL parameter number 3 had an illegal value
** On entry to DGEHRD parameter number 2 had an illegal value
** On entry to DORGHR DORGQR parameter number 2 had an illegal value
** On entry to DHSEQR parameter number 4 had an illegal value
** On entry to DGEBAL parameter number 3 had an illegal value
** On entry to DGEHRD parameter number 2 had an illegal value
** On entry to DORGHR DORGQR parameter number 2 had an illegal value
** On entry to DHSEQR parameter number 4 had an illegal value
Traceback (most recent call last):
File "runpy.py", line 185, in _run_module_as_main
File "runpy.py", line 144, in _get_module_details
File "runpy.py", line 111, in _get_module_details
File "C:\Users\rober\AppData\Local\Programs\napari\\app\napari\__init__.py", line 48, in <module>
from vispy import app
File "C:\Users\rober\AppData\Local\Programs\napari\\app_packages\vispy\__init__.py", line 30, in <module>
from .util import config, set_log_level, keys, sys_info # noqa
File "C:\Users\rober\AppData\Local\Programs\napari\\app_packages\vispy\util\__init__.py", line 9, in <module>
from .logs import logger, set_log_level, use_log_level # noqa
File "C:\Users\rober\AppData\Local\Programs\napari\\app_packages\vispy\util\logs.py", line 14, in <module>
import numpy as np
File "C:\Users\rober\AppData\Local\Programs\napari\\app_packages\numpy\__init__.py", line 305, in <module>
_win_os_check()
File "C:\Users\rober\AppData\Local\Programs\napari\\app_packages\numpy\__init__.py", line 302, in _win_os_check
raise RuntimeError(msg.format(__file__)) from None
RuntimeError: The current Numpy installation ('C:\\Users\\rober\\AppData\\Local\\Programs\\napari\\\\app_packages\\numpy\\__init__.py') fails to pass a sanity check due to a bug in the windows runtime. See this issue for more information: https://tinyurl.com/y3dm3h86
|
RuntimeError
|
def _find_rcc_or_raise() -> str:
"""Locate the Qt rcc binary to generate resource files
1. we always want to use pyrcc5 if it's available, regardless of API
2. it will sometimes, (if not always) be named pyrcc5.bat on windows...
but shutil.which() will find that too
3. We also want to prefer binaries higher up on the path, and we add
sys.executable to the front of the path (and \\Scripts on windows)
4. after pyrcc5 we try pyside2-rcc
see https://github.com/napari/napari/issues/1221
and https://github.com/napari/napari/issues/1254
Returns
-------
path : str
Path to the located rcc binary, or None if not found
Raises
------
FileNotFoundError
If no executable can be found.
"""
python_dir = os.path.dirname(sys.executable)
paths = [python_dir, os.environ.get("PATH", "")]
if os.name == "nt":
paths.insert(0, os.path.join(python_dir, "Scripts"))
# inject bundle binary path if it exists
bundle_bin = bundle_bin_dir()
if bundle_bin:
paths.insert(0, bundle_bin)
path = os.pathsep.join(paths)
for bin_name in ("pyrcc5", "pyside2-rcc"):
rcc_binary = shutil.which(bin_name, path=path)
if rcc_binary:
yield rcc_binary
raise FileNotFoundError(
"Unable to find an executable to build Qt resources (icons).\n"
"Tried: 'pyrcc5.bat', 'pyrcc5', 'pyside2-rcc'.\n"
"Please open issue at https://github.com/napari/napari/issues/."
)
|
def _find_rcc_or_raise() -> str:
"""Locate the Qt rcc binary to generate resource files
1. we always want to use pyrcc5 if it's available, regardless of API
2. it will sometimes, (if not always) be named pyrcc5.bat on windows...
but shutil.which() will find that too
3. We also want to prefer binaries higher up on the path, and we add
sys.executable to the front of the path (and \\Scripts on windows)
4. after pyrcc5 we try pyside2-rcc
see https://github.com/napari/napari/issues/1221
and https://github.com/napari/napari/issues/1254
Returns
-------
path : str
Path to the located rcc binary, or None if not found
Raises
------
FileNotFoundError
If no executable can be found.
"""
python_dir = os.path.dirname(sys.executable)
paths = [python_dir, os.environ.get("PATH", "")]
if os.name == "nt":
paths.insert(0, os.path.join(python_dir, "Scripts"))
path = os.pathsep.join(paths)
for bin_name in ("pyrcc5", "pyside2-rcc"):
rcc_binary = shutil.which(bin_name, path=path)
if rcc_binary:
yield rcc_binary
raise FileNotFoundError(
"Unable to find an executable to build Qt resources (icons).\n"
"Tried: 'pyrcc5.bat', 'pyrcc5', 'pyside2-rcc'.\n"
"Please open issue at https://github.com/napari/napari/issues/."
)
|
https://github.com/napari/napari/issues/2022
|
C:\Users\rober>C:\Users\rober\AppData\Local\Programs\napari\python\python.exe -m napari
** On entry to DGEBAL parameter number 3 had an illegal value
** On entry to DGEHRD parameter number 2 had an illegal value
** On entry to DORGHR DORGQR parameter number 2 had an illegal value
** On entry to DHSEQR parameter number 4 had an illegal value
** On entry to DGEBAL parameter number 3 had an illegal value
** On entry to DGEHRD parameter number 2 had an illegal value
** On entry to DORGHR DORGQR parameter number 2 had an illegal value
** On entry to DHSEQR parameter number 4 had an illegal value
Traceback (most recent call last):
File "runpy.py", line 185, in _run_module_as_main
File "runpy.py", line 144, in _get_module_details
File "runpy.py", line 111, in _get_module_details
File "C:\Users\rober\AppData\Local\Programs\napari\\app\napari\__init__.py", line 48, in <module>
from vispy import app
File "C:\Users\rober\AppData\Local\Programs\napari\\app_packages\vispy\__init__.py", line 30, in <module>
from .util import config, set_log_level, keys, sys_info # noqa
File "C:\Users\rober\AppData\Local\Programs\napari\\app_packages\vispy\util\__init__.py", line 9, in <module>
from .logs import logger, set_log_level, use_log_level # noqa
File "C:\Users\rober\AppData\Local\Programs\napari\\app_packages\vispy\util\logs.py", line 14, in <module>
import numpy as np
File "C:\Users\rober\AppData\Local\Programs\napari\\app_packages\numpy\__init__.py", line 305, in <module>
_win_os_check()
File "C:\Users\rober\AppData\Local\Programs\napari\\app_packages\numpy\__init__.py", line 302, in _win_os_check
raise RuntimeError(msg.format(__file__)) from None
RuntimeError: The current Numpy installation ('C:\\Users\\rober\\AppData\\Local\\Programs\\napari\\\\app_packages\\numpy\\__init__.py') fails to pass a sanity check due to a bug in the windows runtime. See this issue for more information: https://tinyurl.com/y3dm3h86
|
RuntimeError
|
def split_channels(
data: np.ndarray,
channel_axis: int,
**kwargs,
) -> List[FullLayerData]:
"""Split the data array into separate arrays along an axis.
Keyword arguments will override any parameters altered or set in this
function. Colormap, blending, or multiscale are set as follows if not
overridden by a keyword:
- colormap : (magenta, green) for 2 channels, (CYMRGB) for more than 2
- blending : additive
- multiscale : determined by layers.image._image_utils.guess_multiscale.
Colormap, blending and multiscale will be set and returned in meta if not in kwargs.
If any other key is not present in kwargs it will not be returned in the meta
dictionary of the returned LaterData tuple. For example, if gamma is not in
kwargs then meta will not have a gamma key.
Parameters
----------
data : array or list of array
channel_axis : int
Axis to split the image along.
kwargs: dict
Keyword arguments will override the default image meta keys
returned in each layer data tuple.
Returns
-------
List of LayerData tuples: [(data: array, meta: Dict, type: str )]
"""
# Determine if data is a multiscale
multiscale = kwargs.get("multiscale")
if not multiscale:
multiscale, data = guess_multiscale(data)
kwargs["multiscale"] = multiscale
n_channels = (data[0] if multiscale else data).shape[channel_axis]
kwargs["blending"] = kwargs.get("blending") or "additive"
kwargs.setdefault("colormap", None)
# these arguments are *already* iterables in the single-channel case.
iterable_kwargs = {
"scale",
"translate",
"affine",
"contrast_limits",
"metadata",
}
# turn the kwargs dict into a mapping of {key: iterator}
# so that we can use {k: next(v) for k, v in kwargs.items()} below
for key, val in kwargs.items():
if key == "colormap" and val is None:
if n_channels == 1:
kwargs[key] = iter(["gray"])
elif n_channels == 2:
kwargs[key] = iter(MAGENTA_GREEN)
else:
kwargs[key] = itertools.cycle(CYMRGB)
# make sure that iterable_kwargs are a *sequence* of iterables
# for the multichannel case. For example: if scale == (1, 2) &
# n_channels = 3, then scale should == [(1, 2), (1, 2), (1, 2)]
elif key in iterable_kwargs or (
key == "colormap" and isinstance(val, Colormap)
):
kwargs[key] = iter(ensure_sequence_of_iterables(val, n_channels))
else:
kwargs[key] = iter(ensure_iterable(val))
layerdata_list = list()
for i in range(n_channels):
if multiscale:
image = [np.take(data[j], i, axis=channel_axis) for j in range(len(data))]
else:
image = np.take(data, i, axis=channel_axis)
i_kwargs = {}
for key, val in kwargs.items():
try:
i_kwargs[key] = next(val)
except StopIteration:
raise IndexError(
"Error adding multichannel image with data shape "
f"{data.shape!r}.\nRequested channel_axis "
f"({channel_axis}) had length {n_channels}, but "
f"the '{key}' argument only provided {i} values. "
)
layerdata = (image, i_kwargs, "image")
layerdata_list.append(layerdata)
return layerdata_list
|
def split_channels(
data: np.ndarray,
channel_axis: int,
**kwargs,
) -> List[FullLayerData]:
"""Split the data array into separate arrays along an axis.
Keyword arguments will override any parameters altered or set in this
function. Colormap, blending, or multiscale are set as follows if not
overridden by a keyword:
- colormap : (magenta, green) for 2 channels, (CYMRGB) for more than 2
- blending : additive
- multiscale : determined by layers.image._image_utils.guess_multiscale.
Colormap, blending and multiscale will be set and returned in meta if not in kwargs.
If any other key is not present in kwargs it will not be returned in the meta
dictionary of the returned LaterData tuple. For example, if gamma is not in
kwargs then meta will not have a gamma key.
Parameters
----------
data : array or list of array
channel_axis : int
Axis to split the image along.
kwargs: dict
Keyword arguments will override the default image meta keys
returned in each layer data tuple.
Returns
-------
List of LayerData tuples: [(data: array, meta: Dict, type: str )]
"""
# Determine if data is a multiscale
multiscale = kwargs.get("multiscale")
if not multiscale:
multiscale, data = guess_multiscale(data)
kwargs["multiscale"] = multiscale
n_channels = (data[0] if multiscale else data).shape[channel_axis]
kwargs["blending"] = kwargs.get("blending") or "additive"
kwargs.setdefault("colormap", None)
# these arguments are *already* iterables in the single-channel case.
iterable_kwargs = {"scale", "translate", "contrast_limits", "metadata"}
# turn the kwargs dict into a mapping of {key: iterator}
# so that we can use {k: next(v) for k, v in kwargs.items()} below
for key, val in kwargs.items():
if key == "colormap" and val is None:
if n_channels == 1:
kwargs[key] = iter(["gray"])
elif n_channels == 2:
kwargs[key] = iter(MAGENTA_GREEN)
else:
kwargs[key] = itertools.cycle(CYMRGB)
# make sure that iterable_kwargs are a *sequence* of iterables
# for the multichannel case. For example: if scale == (1, 2) &
# n_channels = 3, then scale should == [(1, 2), (1, 2), (1, 2)]
elif key in iterable_kwargs or (
key == "colormap" and isinstance(val, Colormap)
):
kwargs[key] = iter(ensure_sequence_of_iterables(val, n_channels))
else:
kwargs[key] = iter(ensure_iterable(val))
layerdata_list = list()
for i in range(n_channels):
if multiscale:
image = [np.take(data[j], i, axis=channel_axis) for j in range(len(data))]
else:
image = np.take(data, i, axis=channel_axis)
i_kwargs = {}
for key, val in kwargs.items():
try:
i_kwargs[key] = next(val)
except StopIteration:
raise IndexError(
"Error adding multichannel image with data shape "
f"{data.shape!r}.\nRequested channel_axis "
f"({channel_axis}) had length {n_channels}, but "
f"the '{key}' argument only provided {i} values. "
)
layerdata = (image, i_kwargs, "image")
layerdata_list.append(layerdata)
return layerdata_list
|
https://github.com/napari/napari/issues/2024
|
ERROR:root:Unhandled exception:
Traceback (most recent call last):
File "C:\miniconda3\envs\bc\lib\site-packages\napari\_qt\event_loop.py", line 79, in gui_qt
yield app
File "<ipython-input-185-65466d9eb63b>", line 15, in <module>
viewer.add_image(
File "C:\miniconda3\envs\bc\lib\site-packages\napari\components\viewer_model.py", line 776, in add_image
layer = self.add_layer(image_class(image, **i_kwargs))
File "C:\miniconda3\envs\bc\lib\site-packages\napari\layers\image\image.py", line 209, in __init__
super().__init__(
File "C:\miniconda3\envs\bc\lib\site-packages\napari\layers\intensity_mixin.py", line 22, in __init__
super().__init__(*args, **kwargs)
File "C:\miniconda3\envs\bc\lib\site-packages\napari\layers\base\base.py", line 224, in __init__
data2world_transform = Affine(
File "C:\miniconda3\envs\bc\lib\site-packages\napari\utils\transforms\transforms.py", line 313, in __init__
linear_matrix = affine_matrix[:-1, :-1]
IndexError: too many indices for array: array is 1-dimensional, but 2 were indexed
|
IndexError
|
def ensure_sequence_of_iterables(obj, length: Optional[int] = None):
"""Ensure that ``obj`` behaves like a (nested) sequence of iterables.
If length is provided and the object is already a sequence of iterables,
a ValueError will be raised if ``len(obj) != length``.
Parameters
----------
obj : Any
the object to check
length : int, optional
If provided, assert that obj has len ``length``, by default None
Returns
-------
iterable
nested sequence of iterables, or an itertools.repeat instance
Examples
--------
In [1]: ensure_sequence_of_iterables([1, 2])
Out[1]: repeat([1, 2])
In [2]: ensure_sequence_of_iterables([(1, 2), (3, 4)])
Out[2]: [(1, 2), (3, 4)]
In [3]: ensure_sequence_of_iterables({'a':1})
Out[3]: repeat({'a': 1})
In [4]: ensure_sequence_of_iterables(None)
Out[4]: repeat(None)
"""
if obj is not None and is_sequence(obj) and is_iterable(obj[0]):
if length is not None and len(obj) != length:
raise ValueError(f"length of {obj} must equal {length}")
return obj
return itertools.repeat(obj)
|
def ensure_sequence_of_iterables(obj, length: Optional[int] = None):
"""Ensure that ``obj`` behaves like a (nested) sequence of iterables.
If length is provided and the object is already a sequence of iterables,
a ValueError will be raised if ``len(obj) != length``.
Parameters
----------
obj : Any
the object to check
length : int, optional
If provided, assert that obj has len ``length``, by default None
Returns
-------
iterable
nested sequence of iterables, or an itertools.repeat instance
Examples
--------
In [1]: ensure_sequence_of_iterables([1, 2])
Out[1]: repeat([1, 2])
In [2]: ensure_sequence_of_iterables([(1, 2), (3, 4)])
Out[2]: [(1, 2), (3, 4)]
In [3]: ensure_sequence_of_iterables({'a':1})
Out[3]: repeat({'a': 1})
In [4]: ensure_sequence_of_iterables(None)
Out[4]: repeat(None)
"""
if obj and is_sequence(obj) and is_iterable(obj[0]):
if length is not None and len(obj) != length:
raise ValueError(f"length of {obj} must equal {length}")
return obj
return itertools.repeat(obj)
|
https://github.com/napari/napari/issues/2024
|
ERROR:root:Unhandled exception:
Traceback (most recent call last):
File "C:\miniconda3\envs\bc\lib\site-packages\napari\_qt\event_loop.py", line 79, in gui_qt
yield app
File "<ipython-input-185-65466d9eb63b>", line 15, in <module>
viewer.add_image(
File "C:\miniconda3\envs\bc\lib\site-packages\napari\components\viewer_model.py", line 776, in add_image
layer = self.add_layer(image_class(image, **i_kwargs))
File "C:\miniconda3\envs\bc\lib\site-packages\napari\layers\image\image.py", line 209, in __init__
super().__init__(
File "C:\miniconda3\envs\bc\lib\site-packages\napari\layers\intensity_mixin.py", line 22, in __init__
super().__init__(*args, **kwargs)
File "C:\miniconda3\envs\bc\lib\site-packages\napari\layers\base\base.py", line 224, in __init__
data2world_transform = Affine(
File "C:\miniconda3\envs\bc\lib\site-packages\napari\utils\transforms\transforms.py", line 313, in __init__
linear_matrix = affine_matrix[:-1, :-1]
IndexError: too many indices for array: array is 1-dimensional, but 2 were indexed
|
IndexError
|
def data(self, data: Union[list, np.ndarray]):
"""set the vertex data and build the vispy arrays for display"""
# convert data to a numpy array if it is not already one
data = np.asarray(data)
# Sort data by ID then time
self._order = np.lexsort((data[:, 1], data[:, 0]))
data = data[self._order]
# check check the formatting of the incoming track data
self._data = self._validate_track_data(data)
# build the indices for sorting points by time
self._ordered_points_idx = np.argsort(self.data[:, 1])
self._points = self.data[self._ordered_points_idx, 1:]
# build a tree of the track data to allow fast lookup of nearest track
self._kdtree = cKDTree(self._points)
# make the lookup table
# NOTE(arl): it's important to convert the time index to an integer
# here to make sure that we align with the napari dims index which
# will be an integer - however, the time index does not necessarily
# need to be an int, and the shader will render correctly.
frames = list(set(self._points[:, 0].astype(np.uint).tolist()))
self._points_lookup = {}
for f in frames:
idx = np.where(self._points[:, 0] == f)[0]
self._points_lookup[f] = slice(min(idx), max(idx) + 1, 1)
# make a second lookup table using a sparse matrix to convert track id
# to the vertex indices
self._id2idxs = coo_matrix(
(
np.broadcast_to(1, self.track_ids.size), # just dummy ones
(self.track_ids, np.arange(self.track_ids.size)),
)
).tocsr()
|
def data(self, data: Union[list, np.ndarray]):
"""set the vertex data and build the vispy arrays for display"""
# convert data to a numpy array if it is not already one
data = np.asarray(data)
# Sort data by ID then time
self._order = np.lexsort((data[:, 1], data[:, 0]))
data = data[self._order]
# check check the formatting of the incoming track data
self._data = self._validate_track_data(data)
# build the indices for sorting points by time
self._ordered_points_idx = np.argsort(self.data[:, 1])
self._points = self.data[self._ordered_points_idx, 1:]
# build a tree of the track data to allow fast lookup of nearest track
self._kdtree = cKDTree(self._points)
# make the lookup table
# NOTE(arl): it's important to convert the time index to an integer
# here to make sure that we align with the napari dims index which
# will be an integer - however, the time index does not necessarily
# need to be an int, and the shader will render correctly.
frames = list(set(self._points[:, 0].astype(np.uint).tolist()))
self._points_lookup = [None] * (max(frames) + 1)
for f in frames:
idx = np.where(self._points[:, 0] == f)[0]
self._points_lookup[f] = slice(min(idx), max(idx) + 1, 1)
# make a second lookup table using a sparse matrix to convert track id
# to the vertex indices
self._id2idxs = coo_matrix(
(
np.broadcast_to(1, self.track_ids.size), # just dummy ones
(self.track_ids, np.arange(self.track_ids.size)),
)
).tocsr()
|
https://github.com/napari/napari/issues/1943
|
ERROR:root:Unhandled exception:
Traceback (most recent call last):
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_qt/event_loop.py", line 79, in gui_qt
yield app
File "<ipython-input-2-498e4844dd1f>", line 9, in <module>
Tracks = napari.view_tracks(tracks_data,name='tracks')
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/view_layers.py", line 9, in view_tracks
(where <layer_type> is replaced with one of the layer types):
File "<string>", line 3, in add_tracks
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/components/add_layers_mixin.py", line 72, in add_layer
self.layers.append(layer)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/utils/list/_model.py", line 59, in append
self.events.added(item=obj, index=len(self) - 1)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/utils/events/event.py", line 514, in __call__
self._invoke_callback(cb, event)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/utils/events/event.py", line 531, in _invoke_callback
_handle_exception(
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/utils/events/event.py", line 529, in _invoke_callback
cb(event)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_qt/qt_viewer.py", line 323, in _add_layer
vispy_layer = create_vispy_visual(layer)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_vispy/utils.py", line 44, in create_vispy_visual
return visual(layer)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_vispy/vispy_tracks_layer.py", line 50, in __init__
self._on_data_change()
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_vispy/vispy_tracks_layer.py", line 62, in _on_data_change
labels_text, labels_pos = self.layer.track_labels
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/layers/tracks/tracks.py", line 543, in track_labels
padded_positions = self._pad_display_data(positions)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/layers/tracks/tracks.py", line 306, in _pad_display_data
data = np.pad(data, ((0, 0), (0, 1)), 'constant')
File "<__array_function__ internals>", line 5, in pad
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/numpy/lib/arraypad.py", line 748, in pad
pad_width = _as_pairs(pad_width, array.ndim, as_index=True)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/numpy/lib/arraypad.py", line 523, in _as_pairs
return np.broadcast_to(x, (ndim, 2)).tolist()
File "<__array_function__ internals>", line 5, in broadcast_to
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/numpy/lib/stride_tricks.py", line 182, in broadcast_to
return _broadcast_to(array, shape, subok=subok, readonly=True)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/numpy/lib/stride_tricks.py", line 125, in _broadcast_to
it = np.nditer(
ValueError: operands could not be broadcast together with remapped shapes [original->remapped]: (2,2) and requested shape (3,2)
|
ValueError
|
def track_labels(self, current_time: int) -> tuple:
"""return track labels at the current time"""
# this is the slice into the time ordered points array
if current_time not in self._points_lookup:
return [], []
lookup = self._points_lookup[current_time]
pos = self._points[lookup, ...]
lbl = [f"ID:{i}" for i in self._points_id[lookup]]
return lbl, pos
|
def track_labels(self, current_time: int) -> tuple:
"""return track labels at the current time"""
# this is the slice into the time ordered points array
lookup = self._points_lookup[current_time]
pos = self._points[lookup, ...]
lbl = [f"ID:{i}" for i in self._points_id[lookup]]
return lbl, pos
|
https://github.com/napari/napari/issues/1943
|
ERROR:root:Unhandled exception:
Traceback (most recent call last):
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_qt/event_loop.py", line 79, in gui_qt
yield app
File "<ipython-input-2-498e4844dd1f>", line 9, in <module>
Tracks = napari.view_tracks(tracks_data,name='tracks')
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/view_layers.py", line 9, in view_tracks
(where <layer_type> is replaced with one of the layer types):
File "<string>", line 3, in add_tracks
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/components/add_layers_mixin.py", line 72, in add_layer
self.layers.append(layer)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/utils/list/_model.py", line 59, in append
self.events.added(item=obj, index=len(self) - 1)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/utils/events/event.py", line 514, in __call__
self._invoke_callback(cb, event)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/utils/events/event.py", line 531, in _invoke_callback
_handle_exception(
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/utils/events/event.py", line 529, in _invoke_callback
cb(event)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_qt/qt_viewer.py", line 323, in _add_layer
vispy_layer = create_vispy_visual(layer)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_vispy/utils.py", line 44, in create_vispy_visual
return visual(layer)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_vispy/vispy_tracks_layer.py", line 50, in __init__
self._on_data_change()
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_vispy/vispy_tracks_layer.py", line 62, in _on_data_change
labels_text, labels_pos = self.layer.track_labels
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/layers/tracks/tracks.py", line 543, in track_labels
padded_positions = self._pad_display_data(positions)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/layers/tracks/tracks.py", line 306, in _pad_display_data
data = np.pad(data, ((0, 0), (0, 1)), 'constant')
File "<__array_function__ internals>", line 5, in pad
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/numpy/lib/arraypad.py", line 748, in pad
pad_width = _as_pairs(pad_width, array.ndim, as_index=True)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/numpy/lib/arraypad.py", line 523, in _as_pairs
return np.broadcast_to(x, (ndim, 2)).tolist()
File "<__array_function__ internals>", line 5, in broadcast_to
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/numpy/lib/stride_tricks.py", line 182, in broadcast_to
return _broadcast_to(array, shape, subok=subok, readonly=True)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/numpy/lib/stride_tricks.py", line 125, in _broadcast_to
it = np.nditer(
ValueError: operands could not be broadcast together with remapped shapes [original->remapped]: (2,2) and requested shape (3,2)
|
ValueError
|
def track_labels(self) -> tuple:
"""return track labels at the current time"""
labels, positions = self._manager.track_labels(self.current_time)
# if there are no labels, return empty for vispy
if not labels:
return None, (None, None)
padded_positions = self._pad_display_data(positions)
return labels, padded_positions
|
def track_labels(self) -> tuple:
"""return track labels at the current time"""
# check that current time is still within the frame map
if self.current_time < 0 or self.current_time > self._manager.max_time:
# need to return a tuple for pos to clear the vispy text visual
return None, (None, None)
labels, positions = self._manager.track_labels(self.current_time)
padded_positions = self._pad_display_data(positions)
return labels, padded_positions
|
https://github.com/napari/napari/issues/1943
|
ERROR:root:Unhandled exception:
Traceback (most recent call last):
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_qt/event_loop.py", line 79, in gui_qt
yield app
File "<ipython-input-2-498e4844dd1f>", line 9, in <module>
Tracks = napari.view_tracks(tracks_data,name='tracks')
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/view_layers.py", line 9, in view_tracks
(where <layer_type> is replaced with one of the layer types):
File "<string>", line 3, in add_tracks
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/components/add_layers_mixin.py", line 72, in add_layer
self.layers.append(layer)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/utils/list/_model.py", line 59, in append
self.events.added(item=obj, index=len(self) - 1)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/utils/events/event.py", line 514, in __call__
self._invoke_callback(cb, event)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/utils/events/event.py", line 531, in _invoke_callback
_handle_exception(
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/utils/events/event.py", line 529, in _invoke_callback
cb(event)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_qt/qt_viewer.py", line 323, in _add_layer
vispy_layer = create_vispy_visual(layer)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_vispy/utils.py", line 44, in create_vispy_visual
return visual(layer)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_vispy/vispy_tracks_layer.py", line 50, in __init__
self._on_data_change()
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/_vispy/vispy_tracks_layer.py", line 62, in _on_data_change
labels_text, labels_pos = self.layer.track_labels
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/layers/tracks/tracks.py", line 543, in track_labels
padded_positions = self._pad_display_data(positions)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/napari/layers/tracks/tracks.py", line 306, in _pad_display_data
data = np.pad(data, ((0, 0), (0, 1)), 'constant')
File "<__array_function__ internals>", line 5, in pad
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/numpy/lib/arraypad.py", line 748, in pad
pad_width = _as_pairs(pad_width, array.ndim, as_index=True)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/numpy/lib/arraypad.py", line 523, in _as_pairs
return np.broadcast_to(x, (ndim, 2)).tolist()
File "<__array_function__ internals>", line 5, in broadcast_to
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/numpy/lib/stride_tricks.py", line 182, in broadcast_to
return _broadcast_to(array, shape, subok=subok, readonly=True)
File "/home/kapoorlab/anaconda3/envs/tensorflowGPU/lib/python3.8/site-packages/numpy/lib/stride_tricks.py", line 125, in _broadcast_to
it = np.nditer(
ValueError: operands could not be broadcast together with remapped shapes [original->remapped]: (2,2) and requested shape (3,2)
|
ValueError
|
def patch_dmgbuild():
if not MACOS:
return
from dmgbuild import core
# will not be required after dmgbuild > v1.3.3
# see https://github.com/al45tair/dmgbuild/pull/18
with open(core.__file__, "r") as f:
src = f.read()
if (
"max(total_size / 1024" not in src
and "all_args = ['/usr/bin/hdiutil', cmd]" not in src
):
return
with open(core.__file__, "w") as f:
f.write(
src.replace("max(total_size / 1024", "max(total_size / 1000").replace(
"all_args = ['/usr/bin/hdiutil', cmd]",
"all_args = ['sudo', '/usr/bin/hdiutil', cmd]",
)
)
print("patched dmgbuild.core")
|
def patch_dmgbuild():
if not MACOS:
return
from dmgbuild import core
# will not be required after dmgbuild > v1.3.3
# see https://github.com/al45tair/dmgbuild/pull/18
with open(core.__file__, "r") as f:
src = f.read()
if "max(total_size / 1024" not in src:
return
with open(core.__file__, "w") as f:
f.write(src.replace("max(total_size / 1024", "max(total_size / 1000"))
print("patched dmgbuild.core")
|
https://github.com/napari/napari/issues/1611
|
2020-09-03T02:35:23.0596607Z ##[section]Starting: Request a runner to run this job
2020-09-03T02:35:24.3040765Z Can't find any online and idle self-hosted runner in current repository that matches the required labels: 'macos-latest'
2020-09-03T02:35:24.3040846Z Can't find any online and idle self-hosted runner in current repository's account/organization that matches the required labels: 'macos-latest'
2020-09-03T02:35:24.3041234Z Found online and idle hosted runner in current repository's account/organization that matches the required labels: 'macos-latest'
2020-09-03T02:35:24.5086668Z ##[section]Finishing: Request a runner to run this job
2020-09-03T02:35:34.8844850Z Current runner version: '2.273.0'
2020-09-03T02:35:34.9174660Z ##[group]Operating System
2020-09-03T02:35:34.9175340Z Mac OS X
2020-09-03T02:35:34.9175580Z 10.15.6
2020-09-03T02:35:34.9175740Z 19G2021
2020-09-03T02:35:34.9176060Z ##[endgroup]
2020-09-03T02:35:34.9176290Z ##[group]Virtual Environment
2020-09-03T02:35:34.9176550Z Environment: macos-10.15
2020-09-03T02:35:34.9176780Z Version: 20200829.1
2020-09-03T02:35:34.9177090Z Included Software: https://github.com/actions/virtual-environments/blob/macos-10.15/20200829.1/images/macos/macos-10.15-Readme.md
2020-09-03T02:35:34.9177350Z ##[endgroup]
2020-09-03T02:35:34.9178570Z Prepare workflow directory
2020-09-03T02:35:34.9446350Z Prepare all required actions
2020-09-03T02:35:34.9462860Z Download action repository 'actions/checkout@v2'
2020-09-03T02:35:35.5929130Z Download action repository 'actions/setup-python@v2'
2020-09-03T02:35:35.7881260Z Download action repository 'actions/upload-artifact@v2'
2020-09-03T02:35:35.9641130Z Download action repository 'bruceadams/get-release@v1.2.0'
2020-09-03T02:35:36.2726110Z Download action repository 'actions/upload-release-asset@v1'
2020-09-03T02:35:36.4816540Z ##[group]Run actions/checkout@v2
2020-09-03T02:35:36.4817050Z with:
2020-09-03T02:35:36.4817280Z fetch-depth: 0
2020-09-03T02:35:36.4817490Z repository: napari/napari
2020-09-03T02:35:36.4817980Z token: ***
2020-09-03T02:35:36.4818200Z ssh-strict: true
2020-09-03T02:35:36.4818360Z persist-credentials: true
2020-09-03T02:35:36.4818560Z clean: true
2020-09-03T02:35:36.4818750Z lfs: false
2020-09-03T02:35:36.4818940Z submodules: false
2020-09-03T02:35:36.4819130Z env:
2020-09-03T02:35:36.4819420Z GITHUB_TOKEN: ***
2020-09-03T02:35:36.4819640Z DISPLAY: :99.0
2020-09-03T02:35:36.4819830Z ##[endgroup]
2020-09-03T02:35:37.1854520Z Syncing repository: napari/napari
2020-09-03T02:35:37.1861710Z ##[group]Getting Git version info
2020-09-03T02:35:37.1863040Z Working directory is '/Users/runner/work/napari/napari'
2020-09-03T02:35:37.1863640Z [command]/usr/local/bin/git version
2020-09-03T02:35:37.9417000Z git version 2.28.0
2020-09-03T02:35:37.9419690Z ##[endgroup]
2020-09-03T02:35:37.9420750Z Deleting the contents of '/Users/runner/work/napari/napari'
2020-09-03T02:35:37.9422770Z ##[group]Initializing the repository
2020-09-03T02:35:37.9423050Z [command]/usr/local/bin/git init /Users/runner/work/napari/napari
2020-09-03T02:35:37.9423280Z Initialized empty Git repository in /Users/runner/work/napari/napari/.git/
2020-09-03T02:35:37.9423810Z [command]/usr/local/bin/git remote add origin https://github.com/napari/napari
2020-09-03T02:35:37.9424090Z ##[endgroup]
2020-09-03T02:35:37.9424570Z ##[group]Disabling automatic garbage collection
2020-09-03T02:35:37.9425470Z [command]/usr/local/bin/git config --local gc.auto 0
2020-09-03T02:35:37.9425920Z ##[endgroup]
2020-09-03T02:35:37.9428060Z ##[group]Setting up auth
2020-09-03T02:35:37.9428700Z [command]/usr/local/bin/git config --local --name-only --get-regexp core\.sshCommand
2020-09-03T02:35:37.9429800Z [command]/usr/local/bin/git submodule foreach --recursive git config --local --name-only --get-regexp 'core\.sshCommand' && git config --local --unset-all 'core.sshCommand' || :
2020-09-03T02:35:37.9430550Z [command]/usr/local/bin/git config --local --name-only --get-regexp http\.https\:\/\/github\.com\/\.extraheader
2020-09-03T02:35:37.9431740Z [command]/usr/local/bin/git submodule foreach --recursive git config --local --name-only --get-regexp 'http\.https\:\/\/github\.com\/\.extraheader' && git config --local --unset-all 'http.https://github.com/.extraheader' || :
2020-09-03T02:35:37.9432790Z [command]/usr/local/bin/git config --local http.https://github.com/.extraheader AUTHORIZATION: basic ***
2020-09-03T02:35:37.9433360Z ##[endgroup]
2020-09-03T02:35:37.9433860Z ##[group]Fetching the repository
2020-09-03T02:35:37.9434850Z [command]/usr/local/bin/git -c protocol.version=2 fetch --prune --progress --no-recurse-submodules origin +refs/heads/*:refs/remotes/origin/* +refs/tags/*:refs/tags/*
2020-09-03T02:35:37.9435950Z remote: Enumerating objects: 47, done.
2020-09-03T02:35:37.9436450Z remote: Counting objects: 2% (1/47)
2020-09-03T02:35:37.9436760Z remote: Counting objects: 4% (2/47)
2020-09-03T02:35:37.9437030Z remote: Counting objects: 6% (3/47)
2020-09-03T02:35:37.9437180Z remote: Counting objects: 8% (4/47)
2020-09-03T02:35:37.9437360Z remote: Counting objects: 10% (5/47)
2020-09-03T02:35:37.9437530Z remote: Counting objects: 12% (6/47)
2020-09-03T02:35:37.9438210Z remote: Counting objects: 14% (7/47)
2020-09-03T02:35:37.9438920Z remote: Counting objects: 17% (8/47)
2020-09-03T02:35:37.9440190Z remote: Counting objects: 19% (9/47)
2020-09-03T02:35:37.9440680Z remote: Counting objects: 21% (10/47)
2020-09-03T02:35:37.9441130Z remote: Counting objects: 23% (11/47)
2020-09-03T02:35:37.9441590Z remote: Counting objects: 25% (12/47)
2020-09-03T02:35:37.9442040Z remote: Counting objects: 27% (13/47)
2020-09-03T02:35:37.9452730Z remote: Counting objects: 29% (14/47)
2020-09-03T02:35:37.9453870Z remote: Counting objects: 31% (15/47)
2020-09-03T02:35:37.9454440Z remote: Counting objects: 34% (16/47)
2020-09-03T02:35:37.9454900Z remote: Counting objects: 36% (17/47)
2020-09-03T02:35:37.9456040Z remote: Counting objects: 38% (18/47)
2020-09-03T02:35:37.9456250Z remote: Counting objects: 40% (19/47)
2020-09-03T02:35:37.9456870Z remote: Counting objects: 42% (20/47)
2020-09-03T02:35:37.9457280Z remote: Counting objects: 44% (21/47)
2020-09-03T02:35:37.9457740Z remote: Counting objects: 46% (22/47)
2020-09-03T02:35:37.9458280Z remote: Counting objects: 48% (23/47)
2020-09-03T02:35:37.9458500Z remote: Counting objects: 51% (24/47)
2020-09-03T02:35:37.9458670Z remote: Counting objects: 53% (25/47)
2020-09-03T02:35:37.9458850Z remote: Counting objects: 55% (26/47)
2020-09-03T02:35:37.9459320Z remote: Counting objects: 57% (27/47)
2020-09-03T02:35:37.9460140Z remote: Counting objects: 59% (28/47)
2020-09-03T02:35:37.9460650Z remote: Counting objects: 61% (29/47)
2020-09-03T02:35:37.9461100Z remote: Counting objects: 63% (30/47)
2020-09-03T02:35:37.9461540Z remote: Counting objects: 65% (31/47)
2020-09-03T02:35:37.9461990Z remote: Counting objects: 68% (32/47)
2020-09-03T02:35:37.9462440Z remote: Counting objects: 70% (33/47)
2020-09-03T02:35:37.9462800Z remote: Counting objects: 72% (34/47)
2020-09-03T02:35:37.9463530Z remote: Counting objects: 74% (35/47)
2020-09-03T02:35:37.9463760Z remote: Counting objects: 76% (36/47)
2020-09-03T02:35:37.9463940Z remote: Counting objects: 78% (37/47)
2020-09-03T02:35:37.9464120Z remote: Counting objects: 80% (38/47)
2020-09-03T02:35:37.9464300Z remote: Counting objects: 82% (39/47)
2020-09-03T02:35:37.9464470Z remote: Counting objects: 85% (40/47)
2020-09-03T02:35:37.9464680Z remote: Counting objects: 87% (41/47)
2020-09-03T02:35:37.9465120Z remote: Counting objects: 89% (42/47)
2020-09-03T02:35:37.9465420Z remote: Counting objects: 91% (43/47)
2020-09-03T02:35:37.9465570Z remote: Counting objects: 93% (44/47)
2020-09-03T02:35:37.9465990Z remote: Counting objects: 95% (45/47)
2020-09-03T02:35:37.9467000Z remote: Counting objects: 97% (46/47)
2020-09-03T02:35:37.9467520Z remote: Counting objects: 100% (47/47)
2020-09-03T02:35:37.9467970Z remote: Counting objects: 100% (47/47), done.
2020-09-03T02:35:37.9468270Z remote: Compressing objects: 2% (1/43)
2020-09-03T02:35:37.9468610Z remote: Compressing objects: 4% (2/43)
2020-09-03T02:35:37.9468880Z remote: Compressing objects: 6% (3/43)
2020-09-03T02:35:37.9469160Z remote: Compressing objects: 9% (4/43)
2020-09-03T02:35:37.9469510Z remote: Compressing objects: 11% (5/43)
2020-09-03T02:35:37.9470600Z remote: Compressing objects: 13% (6/43)
2020-09-03T02:35:37.9471100Z remote: Compressing objects: 16% (7/43)
2020-09-03T02:35:37.9471390Z remote: Compressing objects: 18% (8/43)
2020-09-03T02:35:37.9471850Z remote: Compressing objects: 20% (9/43)
2020-09-03T02:35:37.9472140Z remote: Compressing objects: 23% (10/43)
2020-09-03T02:35:37.9472340Z remote: Compressing objects: 25% (11/43)
2020-09-03T02:35:37.9472760Z remote: Compressing objects: 27% (12/43)
2020-09-03T02:35:37.9473050Z remote: Compressing objects: 30% (13/43)
2020-09-03T02:35:37.9473490Z remote: Compressing objects: 32% (14/43)
2020-09-03T02:35:37.9473940Z remote: Compressing objects: 34% (15/43)
2020-09-03T02:35:37.9474380Z remote: Compressing objects: 37% (16/43)
2020-09-03T02:35:37.9474820Z remote: Compressing objects: 39% (17/43)
2020-09-03T02:35:37.9475100Z remote: Compressing objects: 41% (18/43)
2020-09-03T02:35:37.9475380Z remote: Compressing objects: 44% (19/43)
2020-09-03T02:35:37.9475960Z remote: Compressing objects: 46% (20/43)
2020-09-03T02:35:37.9477090Z remote: Compressing objects: 48% (21/43)
2020-09-03T02:35:37.9477290Z remote: Compressing objects: 51% (22/43)
2020-09-03T02:35:37.9477430Z remote: Compressing objects: 53% (23/43)
2020-09-03T02:35:37.9477610Z remote: Compressing objects: 55% (24/43)
2020-09-03T02:35:37.9477790Z remote: Compressing objects: 58% (25/43)
2020-09-03T02:35:37.9477960Z remote: Compressing objects: 60% (26/43)
2020-09-03T02:35:37.9478140Z remote: Compressing objects: 62% (27/43)
2020-09-03T02:35:37.9478320Z remote: Compressing objects: 65% (28/43)
2020-09-03T02:35:37.9478490Z remote: Compressing objects: 67% (29/43)
2020-09-03T02:35:37.9478680Z remote: Compressing objects: 69% (30/43)
2020-09-03T02:35:37.9479020Z remote: Compressing objects: 72% (31/43)
2020-09-03T02:35:37.9479230Z remote: Compressing objects: 74% (32/43)
2020-09-03T02:35:37.9479420Z remote: Compressing objects: 76% (33/43)
2020-09-03T02:35:37.9479560Z remote: Compressing objects: 79% (34/43)
2020-09-03T02:35:37.9479740Z remote: Compressing objects: 81% (35/43)
2020-09-03T02:35:37.9479920Z remote: Compressing objects: 83% (36/43)
2020-09-03T02:35:37.9480090Z remote: Compressing objects: 86% (37/43)
2020-09-03T02:35:37.9480280Z remote: Compressing objects: 88% (38/43)
2020-09-03T02:35:37.9480460Z remote: Compressing objects: 90% (39/43)
2020-09-03T02:35:37.9480670Z remote: Compressing objects: 93% (40/43)
2020-09-03T02:35:37.9481060Z remote: Compressing objects: 95% (41/43)
2020-09-03T02:35:37.9481230Z remote: Compressing objects: 97% (42/43)
2020-09-03T02:35:37.9481410Z remote: Compressing objects: 100% (43/43)
2020-09-03T02:35:37.9481590Z remote: Compressing objects: 100% (43/43), done.
2020-09-03T02:35:37.9481810Z Receiving objects: 0% (1/10717)
2020-09-03T02:35:37.9482000Z Receiving objects: 1% (108/10717)
2020-09-03T02:35:37.9482190Z Receiving objects: 2% (215/10717)
2020-09-03T02:35:37.9482370Z Receiving objects: 3% (322/10717)
2020-09-03T02:35:37.9482540Z Receiving objects: 4% (429/10717)
2020-09-03T02:35:37.9482710Z Receiving objects: 5% (536/10717)
2020-09-03T02:35:37.9482850Z Receiving objects: 6% (644/10717)
2020-09-03T02:35:37.9483020Z Receiving objects: 7% (751/10717)
2020-09-03T02:35:37.9483190Z Receiving objects: 8% (858/10717)
2020-09-03T02:35:37.9581050Z Receiving objects: 9% (965/10717)
2020-09-03T02:35:38.4608170Z Receiving objects: 10% (1072/10717)
2020-09-03T02:35:38.4613280Z Receiving objects: 11% (1179/10717)
2020-09-03T02:35:38.4613620Z Receiving objects: 12% (1287/10717)
2020-09-03T02:35:38.4613900Z Receiving objects: 13% (1394/10717)
2020-09-03T02:35:38.4614510Z Receiving objects: 14% (1501/10717)
2020-09-03T02:35:38.4615050Z Receiving objects: 15% (1608/10717)
2020-09-03T02:35:38.4616020Z Receiving objects: 16% (1715/10717)
2020-09-03T02:35:38.4616940Z Receiving objects: 17% (1822/10717)
2020-09-03T02:35:38.4617760Z Receiving objects: 18% (1930/10717)
2020-09-03T02:35:38.4618780Z Receiving objects: 19% (2037/10717)
2020-09-03T02:35:38.4619670Z Receiving objects: 20% (2144/10717)
2020-09-03T02:35:38.4620580Z Receiving objects: 21% (2251/10717)
2020-09-03T02:35:38.4620950Z Receiving objects: 22% (2358/10717)
2020-09-03T02:35:38.4621770Z Receiving objects: 23% (2465/10717)
2020-09-03T02:35:38.4622230Z Receiving objects: 24% (2573/10717)
2020-09-03T02:35:38.4622870Z Receiving objects: 25% (2680/10717)
2020-09-03T02:35:38.4623340Z Receiving objects: 26% (2787/10717)
2020-09-03T02:35:38.4624220Z Receiving objects: 27% (2894/10717)
2020-09-03T02:35:38.4625050Z Receiving objects: 28% (3001/10717)
2020-09-03T02:35:38.4625270Z Receiving objects: 29% (3108/10717)
2020-09-03T02:35:38.4625460Z Receiving objects: 30% (3216/10717)
2020-09-03T02:35:38.4626090Z Receiving objects: 31% (3323/10717)
2020-09-03T02:35:38.4627050Z Receiving objects: 32% (3430/10717)
2020-09-03T02:35:38.4627530Z Receiving objects: 33% (3537/10717)
2020-09-03T02:35:38.4628030Z Receiving objects: 34% (3644/10717)
2020-09-03T02:35:38.4628810Z Receiving objects: 35% (3751/10717)
2020-09-03T02:35:38.4629540Z Receiving objects: 36% (3859/10717)
2020-09-03T02:35:38.4630300Z Receiving objects: 37% (3966/10717)
2020-09-03T02:35:38.4631060Z Receiving objects: 38% (4073/10717)
2020-09-03T02:35:38.4631840Z Receiving objects: 39% (4180/10717)
2020-09-03T02:35:38.4632880Z Receiving objects: 40% (4287/10717)
2020-09-03T02:35:38.4633290Z Receiving objects: 41% (4394/10717)
2020-09-03T02:35:38.4633580Z Receiving objects: 42% (4502/10717)
2020-09-03T02:35:38.4633990Z Receiving objects: 43% (4609/10717)
2020-09-03T02:35:38.4634340Z Receiving objects: 44% (4716/10717)
2020-09-03T02:35:38.4635330Z Receiving objects: 45% (4823/10717)
2020-09-03T02:35:38.4635740Z Receiving objects: 46% (4930/10717)
2020-09-03T02:35:38.4636190Z Receiving objects: 47% (5037/10717)
2020-09-03T02:35:38.4636610Z Receiving objects: 48% (5145/10717)
2020-09-03T02:35:38.4637030Z Receiving objects: 49% (5252/10717)
2020-09-03T02:35:38.4637370Z Receiving objects: 50% (5359/10717)
2020-09-03T02:35:38.4638250Z Receiving objects: 51% (5466/10717)
2020-09-03T02:35:38.4639060Z Receiving objects: 52% (5573/10717)
2020-09-03T02:35:38.4639900Z Receiving objects: 53% (5681/10717)
2020-09-03T02:35:38.4640510Z Receiving objects: 54% (5788/10717)
2020-09-03T02:35:38.4640970Z Receiving objects: 55% (5895/10717)
2020-09-03T02:35:38.4641700Z Receiving objects: 56% (6002/10717)
2020-09-03T02:35:38.4642050Z Receiving objects: 57% (6109/10717)
2020-09-03T02:35:38.4642750Z Receiving objects: 58% (6216/10717)
2020-09-03T02:35:38.4643260Z Receiving objects: 59% (6324/10717)
2020-09-03T02:35:38.4643750Z Receiving objects: 60% (6431/10717)
2020-09-03T02:35:38.4644150Z Receiving objects: 61% (6538/10717)
2020-09-03T02:35:38.4644490Z Receiving objects: 62% (6645/10717)
2020-09-03T02:35:38.4644900Z Receiving objects: 63% (6752/10717)
2020-09-03T02:35:38.4645210Z Receiving objects: 64% (6859/10717)
2020-09-03T02:35:38.4645860Z Receiving objects: 65% (6967/10717)
2020-09-03T02:35:38.4646070Z Receiving objects: 66% (7074/10717)
2020-09-03T02:35:38.4646240Z Receiving objects: 67% (7181/10717)
2020-09-03T02:35:38.4646430Z Receiving objects: 68% (7288/10717)
2020-09-03T02:35:38.4646620Z Receiving objects: 69% (7395/10717)
2020-09-03T02:35:38.4646800Z Receiving objects: 70% (7502/10717)
2020-09-03T02:35:38.4646940Z Receiving objects: 71% (7610/10717)
2020-09-03T02:35:38.4647120Z Receiving objects: 72% (7717/10717)
2020-09-03T02:35:38.4647290Z Receiving objects: 73% (7824/10717)
2020-09-03T02:35:38.4647470Z Receiving objects: 74% (7931/10717)
2020-09-03T02:35:38.4647640Z Receiving objects: 75% (8038/10717)
2020-09-03T02:35:38.4647820Z Receiving objects: 76% (8145/10717)
2020-09-03T02:35:38.4648000Z Receiving objects: 77% (8253/10717)
2020-09-03T02:35:38.4648910Z Receiving objects: 78% (8360/10717)
2020-09-03T02:35:38.4649090Z Receiving objects: 79% (8467/10717)
2020-09-03T02:35:38.4649490Z Receiving objects: 80% (8574/10717)
2020-09-03T02:35:38.4649960Z Receiving objects: 81% (8681/10717)
2020-09-03T02:35:38.4650480Z Receiving objects: 82% (8788/10717)
2020-09-03T02:35:38.4650920Z Receiving objects: 83% (8896/10717)
2020-09-03T02:35:38.4651940Z Receiving objects: 84% (9003/10717)
2020-09-03T02:35:38.4652500Z Receiving objects: 85% (9110/10717)
2020-09-03T02:35:38.4653110Z Receiving objects: 86% (9217/10717)
2020-09-03T02:35:38.4653640Z Receiving objects: 87% (9324/10717)
2020-09-03T02:35:38.4654060Z Receiving objects: 88% (9431/10717)
2020-09-03T02:35:38.4654660Z Receiving objects: 89% (9539/10717)
2020-09-03T02:35:38.4655360Z Receiving objects: 90% (9646/10717)
2020-09-03T02:35:38.4656060Z Receiving objects: 91% (9753/10717)
2020-09-03T02:35:38.4657170Z Receiving objects: 92% (9860/10717)
2020-09-03T02:35:38.4658040Z Receiving objects: 93% (9967/10717)
2020-09-03T02:35:38.4658710Z Receiving objects: 94% (10074/10717)
2020-09-03T02:35:38.4659350Z Receiving objects: 95% (10182/10717)
2020-09-03T02:35:38.4659950Z Receiving objects: 96% (10289/10717)
2020-09-03T02:35:38.4660460Z Receiving objects: 97% (10396/10717)
2020-09-03T02:35:38.4661810Z remote: Total 10717 (delta 16), reused 12 (delta 4), pack-reused 10670
2020-09-03T02:35:38.4662390Z Receiving objects: 98% (10503/10717)
2020-09-03T02:35:38.4662900Z Receiving objects: 99% (10610/10717)
2020-09-03T02:35:38.4663390Z Receiving objects: 100% (10717/10717)
2020-09-03T02:35:38.4663830Z Receiving objects: 100% (10717/10717), 19.50 MiB | 52.68 MiB/s, done.
2020-09-03T02:35:38.4664540Z Resolving deltas: 0% (0/7480)
2020-09-03T02:35:38.4665340Z Resolving deltas: 1% (86/7480)
2020-09-03T02:35:38.4666210Z Resolving deltas: 2% (150/7480)
2020-09-03T02:35:38.4666690Z Resolving deltas: 3% (226/7480)
2020-09-03T02:35:38.4667480Z Resolving deltas: 4% (303/7480)
2020-09-03T02:35:38.4667740Z Resolving deltas: 5% (374/7480)
2020-09-03T02:35:38.4667950Z Resolving deltas: 6% (458/7480)
2020-09-03T02:35:38.4668640Z Resolving deltas: 7% (527/7480)
2020-09-03T02:35:38.4669910Z Resolving deltas: 8% (602/7480)
2020-09-03T02:35:38.4670240Z Resolving deltas: 9% (681/7480)
2020-09-03T02:35:38.4671200Z Resolving deltas: 10% (751/7480)
2020-09-03T02:35:38.4671950Z Resolving deltas: 11% (825/7480)
2020-09-03T02:35:38.4672400Z Resolving deltas: 12% (899/7480)
2020-09-03T02:35:38.4673290Z Resolving deltas: 13% (976/7480)
2020-09-03T02:35:38.4674090Z Resolving deltas: 14% (1055/7480)
2020-09-03T02:35:38.4674260Z Resolving deltas: 15% (1122/7480)
2020-09-03T02:35:38.4674750Z Resolving deltas: 16% (1246/7480)
2020-09-03T02:35:38.4675370Z Resolving deltas: 17% (1275/7480)
2020-09-03T02:35:38.4675860Z Resolving deltas: 18% (1352/7480)
2020-09-03T02:35:38.4676630Z Resolving deltas: 19% (1440/7480)
2020-09-03T02:35:38.4677130Z Resolving deltas: 20% (1496/7480)
2020-09-03T02:35:38.4677650Z Resolving deltas: 21% (1571/7480)
2020-09-03T02:35:38.4678160Z Resolving deltas: 22% (1668/7480)
2020-09-03T02:35:38.4678380Z Resolving deltas: 23% (1726/7480)
2020-09-03T02:35:38.4678680Z Resolving deltas: 24% (1797/7480)
2020-09-03T02:35:38.4678830Z Resolving deltas: 25% (1879/7480)
2020-09-03T02:35:38.4679320Z Resolving deltas: 26% (1946/7480)
2020-09-03T02:35:38.4679810Z Resolving deltas: 27% (2023/7480)
2020-09-03T02:35:38.4680290Z Resolving deltas: 28% (2109/7480)
2020-09-03T02:35:38.4680770Z Resolving deltas: 29% (2181/7480)
2020-09-03T02:35:38.4681100Z Resolving deltas: 30% (2256/7480)
2020-09-03T02:35:38.4681560Z Resolving deltas: 31% (2319/7480)
2020-09-03T02:35:38.4682050Z Resolving deltas: 32% (2407/7480)
2020-09-03T02:35:38.4682660Z Resolving deltas: 33% (2470/7480)
2020-09-03T02:35:38.4682880Z Resolving deltas: 34% (2556/7480)
2020-09-03T02:35:38.4683080Z Resolving deltas: 35% (2626/7480)
2020-09-03T02:35:38.4683220Z Resolving deltas: 36% (2710/7480)
2020-09-03T02:35:38.4683810Z Resolving deltas: 37% (2783/7480)
2020-09-03T02:35:38.4684040Z Resolving deltas: 38% (2867/7480)
2020-09-03T02:35:38.4684250Z Resolving deltas: 39% (2918/7480)
2020-09-03T02:35:38.4684440Z Resolving deltas: 40% (2997/7480)
2020-09-03T02:35:38.4684630Z Resolving deltas: 41% (3067/7480)
2020-09-03T02:35:38.4684910Z Resolving deltas: 42% (3154/7480)
2020-09-03T02:35:38.4685130Z Resolving deltas: 43% (3219/7480)
2020-09-03T02:35:38.4685650Z Resolving deltas: 44% (3301/7480)
2020-09-03T02:35:38.4686450Z Resolving deltas: 45% (3386/7480)
2020-09-03T02:35:38.4687170Z Resolving deltas: 46% (3446/7480)
2020-09-03T02:35:38.4688160Z Resolving deltas: 47% (3521/7480)
2020-09-03T02:35:38.4689440Z Resolving deltas: 48% (3591/7480)
2020-09-03T02:35:38.4689690Z Resolving deltas: 49% (3666/7480)
2020-09-03T02:35:38.4690020Z Resolving deltas: 50% (3740/7480)
2020-09-03T02:35:38.4690220Z Resolving deltas: 51% (3816/7480)
2020-09-03T02:35:38.4690420Z Resolving deltas: 52% (3894/7480)
2020-09-03T02:35:38.4690650Z Resolving deltas: 53% (3967/7480)
2020-09-03T02:35:38.4691150Z Resolving deltas: 54% (4045/7480)
2020-09-03T02:35:38.4691460Z Resolving deltas: 55% (4121/7480)
2020-09-03T02:35:38.4691850Z Resolving deltas: 56% (4194/7480)
2020-09-03T02:35:38.4692190Z Resolving deltas: 57% (4277/7480)
2020-09-03T02:35:38.4692490Z Resolving deltas: 58% (4366/7480)
2020-09-03T02:35:38.4693000Z Resolving deltas: 59% (4414/7480)
2020-09-03T02:35:38.4693290Z Resolving deltas: 60% (4533/7480)
2020-09-03T02:35:38.4693550Z Resolving deltas: 61% (4564/7480)
2020-09-03T02:35:38.4693700Z Resolving deltas: 62% (4644/7480)
2020-09-03T02:35:38.4693890Z Resolving deltas: 63% (4716/7480)
2020-09-03T02:35:38.4694090Z Resolving deltas: 64% (4797/7480)
2020-09-03T02:35:38.4694350Z Resolving deltas: 65% (4876/7480)
2020-09-03T02:35:38.4812940Z Resolving deltas: 66% (4939/7480)
2020-09-03T02:35:38.4813570Z Resolving deltas: 67% (5019/7480)
2020-09-03T02:35:38.4814640Z Resolving deltas: 68% (5087/7480)
2020-09-03T02:35:38.4815340Z Resolving deltas: 69% (5168/7480)
2020-09-03T02:35:38.4815970Z Resolving deltas: 70% (5236/7480)
2020-09-03T02:35:38.4816580Z Resolving deltas: 71% (5322/7480)
2020-09-03T02:35:38.4817180Z Resolving deltas: 72% (5386/7480)
2020-09-03T02:35:38.4817970Z Resolving deltas: 73% (5474/7480)
2020-09-03T02:35:38.4818840Z Resolving deltas: 74% (5542/7480)
2020-09-03T02:35:38.4819240Z Resolving deltas: 75% (5623/7480)
2020-09-03T02:35:38.4820060Z Resolving deltas: 76% (5685/7480)
2020-09-03T02:35:38.4820460Z Resolving deltas: 77% (5761/7480)
2020-09-03T02:35:38.4820770Z Resolving deltas: 78% (5836/7480)
2020-09-03T02:35:38.4821670Z Resolving deltas: 79% (5912/7480)
2020-09-03T02:35:38.4822110Z Resolving deltas: 80% (5993/7480)
2020-09-03T02:35:38.4822570Z Resolving deltas: 81% (6059/7480)
2020-09-03T02:35:38.4823020Z Resolving deltas: 82% (6137/7480)
2020-09-03T02:35:38.4823450Z Resolving deltas: 83% (6215/7480)
2020-09-03T02:35:38.4823880Z Resolving deltas: 84% (6291/7480)
2020-09-03T02:35:38.4824340Z Resolving deltas: 85% (6359/7480)
2020-09-03T02:35:38.4824770Z Resolving deltas: 86% (6442/7480)
2020-09-03T02:35:38.4825100Z Resolving deltas: 87% (6508/7480)
2020-09-03T02:35:38.4825530Z Resolving deltas: 88% (6591/7480)
2020-09-03T02:35:38.4825940Z Resolving deltas: 89% (6658/7480)
2020-09-03T02:35:38.4826370Z Resolving deltas: 90% (6736/7480)
2020-09-03T02:35:38.4826770Z Resolving deltas: 91% (6841/7480)
2020-09-03T02:35:38.4827180Z Resolving deltas: 92% (6886/7480)
2020-09-03T02:35:38.4827490Z Resolving deltas: 93% (6963/7480)
2020-09-03T02:35:38.4827700Z Resolving deltas: 94% (7062/7480)
2020-09-03T02:35:38.4827980Z Resolving deltas: 95% (7112/7480)
2020-09-03T02:35:38.4828320Z Resolving deltas: 96% (7188/7480)
2020-09-03T02:35:38.4828650Z Resolving deltas: 97% (7256/7480)
2020-09-03T02:35:38.4828930Z Resolving deltas: 98% (7343/7480)
2020-09-03T02:35:38.4829810Z Resolving deltas: 99% (7412/7480)
2020-09-03T02:35:38.4830190Z Resolving deltas: 100% (7480/7480)
2020-09-03T02:35:38.4831140Z Resolving deltas: 100% (7480/7480), done.
2020-09-03T02:35:38.4831460Z From https://github.com/napari/napari
2020-09-03T02:35:38.4833240Z * [new branch] master -> origin/master
2020-09-03T02:35:38.4834240Z * [new tag] v0.0.1 -> v0.0.1
2020-09-03T02:35:38.4835570Z * [new tag] v0.0.2 -> v0.0.2
2020-09-03T02:35:38.4836820Z * [new tag] v0.0.3.1 -> v0.0.3.1
2020-09-03T02:35:38.4838110Z * [new tag] v0.0.4 -> v0.0.4
2020-09-03T02:35:38.4839410Z * [new tag] v0.0.5 -> v0.0.5
2020-09-03T02:35:38.4840660Z * [new tag] v0.0.5.1 -> v0.0.5.1
2020-09-03T02:35:38.4841890Z * [new tag] v0.0.6 -> v0.0.6
2020-09-03T02:35:38.4843100Z * [new tag] v0.0.7 -> v0.0.7
2020-09-03T02:35:38.4844320Z * [new tag] v0.0.8 -> v0.0.8
2020-09-03T02:35:38.4845530Z * [new tag] v0.0.9 -> v0.0.9
2020-09-03T02:35:38.4846760Z * [new tag] v0.1.0 -> v0.1.0
2020-09-03T02:35:38.4848070Z * [new tag] v0.1.1 -> v0.1.1
2020-09-03T02:35:38.4849910Z * [new tag] v0.1.2 -> v0.1.2
2020-09-03T02:35:38.4851410Z * [new tag] v0.1.3 -> v0.1.3
2020-09-03T02:35:38.4852910Z * [new tag] v0.1.4 -> v0.1.4
2020-09-03T02:35:38.4854000Z * [new tag] v0.1.5 -> v0.1.5
2020-09-03T02:35:38.4855970Z * [new tag] v0.2.0 -> v0.2.0
2020-09-03T02:35:38.4857010Z * [new tag] v0.2.1 -> v0.2.1
2020-09-03T02:35:38.4857860Z * [new tag] v0.2.10 -> v0.2.10
2020-09-03T02:35:38.4858590Z * [new tag] v0.2.10rc0 -> v0.2.10rc0
2020-09-03T02:35:38.4859820Z * [new tag] v0.2.11 -> v0.2.11
2020-09-03T02:35:38.4860860Z * [new tag] v0.2.11rc0 -> v0.2.11rc0
2020-09-03T02:35:38.4861760Z * [new tag] v0.2.12 -> v0.2.12
2020-09-03T02:35:38.4862670Z * [new tag] v0.2.12rc0 -> v0.2.12rc0
2020-09-03T02:35:38.4864540Z * [new tag] v0.2.12rc1 -> v0.2.12rc1
2020-09-03T02:35:38.4866050Z * [new tag] v0.2.12rc2 -> v0.2.12rc2
2020-09-03T02:35:38.4867770Z * [new tag] v0.2.12rc3 -> v0.2.12rc3
2020-09-03T02:35:38.4868930Z * [new tag] v0.2.12rc4 -> v0.2.12rc4
2020-09-03T02:35:38.4870080Z * [new tag] v0.2.2 -> v0.2.2
2020-09-03T02:35:38.4871720Z * [new tag] v0.2.3 -> v0.2.3
2020-09-03T02:35:38.4872820Z * [new tag] v0.2.4 -> v0.2.4
2020-09-03T02:35:38.4873860Z * [new tag] v0.2.4rc1 -> v0.2.4rc1
2020-09-03T02:35:38.4875390Z * [new tag] v0.2.5 -> v0.2.5
2020-09-03T02:35:38.4876940Z * [new tag] v0.2.5rc1 -> v0.2.5rc1
2020-09-03T02:35:38.4878440Z * [new tag] v0.2.6 -> v0.2.6
2020-09-03T02:35:38.4879620Z * [new tag] v0.2.6rc1 -> v0.2.6rc1
2020-09-03T02:35:38.4880760Z * [new tag] v0.2.7 -> v0.2.7
2020-09-03T02:35:38.4882430Z * [new tag] v0.2.7rc1 -> v0.2.7rc1
2020-09-03T02:35:38.4883530Z * [new tag] v0.2.7rc2 -> v0.2.7rc2
2020-09-03T02:35:38.4884580Z * [new tag] v0.2.8 -> v0.2.8
2020-09-03T02:35:38.4885740Z * [new tag] v0.2.8rc1 -> v0.2.8rc1
2020-09-03T02:35:38.4886860Z * [new tag] v0.2.9 -> v0.2.9
2020-09-03T02:35:38.4888410Z * [new tag] v0.2.9rc1 -> v0.2.9rc1
2020-09-03T02:35:38.4889560Z * [new tag] v0.3.0 -> v0.3.0
2020-09-03T02:35:38.4891240Z * [new tag] v0.3.0rc0 -> v0.3.0rc0
2020-09-03T02:35:38.4892330Z * [new tag] v0.3.0rc1 -> v0.3.0rc1
2020-09-03T02:35:38.4893450Z * [new tag] v0.3.1 -> v0.3.1
2020-09-03T02:35:38.4894490Z * [new tag] v0.3.1rc0 -> v0.3.1rc0
2020-09-03T02:35:38.4895710Z * [new tag] v0.3.2 -> v0.3.2
2020-09-03T02:35:38.4896900Z * [new tag] v0.3.2rc0 -> v0.3.2rc0
2020-09-03T02:35:38.4898040Z * [new tag] v0.3.2rc1 -> v0.3.2rc1
2020-09-03T02:35:38.4899210Z * [new tag] v0.3.2rc2 -> v0.3.2rc2
2020-09-03T02:35:38.4900440Z * [new tag] v0.3.2rc3 -> v0.3.2rc3
2020-09-03T02:35:38.4901580Z * [new tag] v0.3.3 -> v0.3.3
2020-09-03T02:35:38.4903400Z * [new tag] v0.3.3rc0 -> v0.3.3rc0
2020-09-03T02:35:38.4904550Z * [new tag] v0.3.4 -> v0.3.4
2020-09-03T02:35:38.4905700Z * [new tag] v0.3.4rc0 -> v0.3.4rc0
2020-09-03T02:35:38.4906840Z * [new tag] v0.3.5 -> v0.3.5
2020-09-03T02:35:38.4907970Z * [new tag] v0.3.5rc0 -> v0.3.5rc0
2020-09-03T02:35:38.4909110Z * [new tag] v0.3.5rc1 -> v0.3.5rc1
2020-09-03T02:35:38.4910330Z * [new tag] v0.3.6 -> v0.3.6
2020-09-03T02:35:38.4911450Z * [new tag] v0.3.6rc0 -> v0.3.6rc0
2020-09-03T02:35:38.4912720Z * [new tag] v0.3.6rc1 -> v0.3.6rc1
2020-09-03T02:35:38.4914590Z * [new tag] v0.3.6rc2 -> v0.3.6rc2
2020-09-03T02:35:38.4915850Z * [new tag] v0.3.7rc0 -> v0.3.7rc0
2020-09-03T02:35:38.4917010Z * [new tag] v0.3.7rc1 -> v0.3.7rc1
2020-09-03T02:35:38.4918150Z * [new tag] v0.3.7rc2 -> v0.3.7rc2
2020-09-03T02:35:38.4919680Z * [new tag] v0.3.7rc3 -> v0.3.7rc3
2020-09-03T02:35:38.4930520Z [command]/usr/local/bin/git tag --list v0.3.7rc3
2020-09-03T02:35:38.5025190Z v0.3.7rc3
2020-09-03T02:35:38.5037710Z [command]/usr/local/bin/git rev-parse refs/tags/v0.3.7rc3
2020-09-03T02:35:38.5097480Z ea0fc9ca29d730808b1a66a8710679ee0fbe7ab5
2020-09-03T02:35:38.5103990Z ##[endgroup]
2020-09-03T02:35:38.5104580Z ##[group]Determining the checkout info
2020-09-03T02:35:38.5107420Z ##[endgroup]
2020-09-03T02:35:38.5107950Z ##[group]Checking out the ref
2020-09-03T02:35:38.5114450Z [command]/usr/local/bin/git checkout --progress --force refs/tags/v0.3.7rc3
2020-09-03T02:35:38.6599120Z HEAD is now at ea0fc9c Skip bundle test when setup.cfg is missing (#1608)
2020-09-03T02:35:38.6615110Z ##[endgroup]
2020-09-03T02:35:38.6622620Z [command]/usr/local/bin/git log -1
2020-09-03T02:35:38.6728870Z commit ea0fc9ca29d730808b1a66a8710679ee0fbe7ab5
2020-09-03T02:35:38.6731460Z Author: Juan Nunez-Iglesias <juan.nunez-iglesias@monash.edu>
2020-09-03T02:35:38.6733120Z Date: Thu Sep 3 12:34:39 2020 +1000
2020-09-03T02:35:38.6733820Z
2020-09-03T02:35:38.6734690Z Skip bundle test when setup.cfg is missing (#1608)
2020-09-03T02:35:38.6735460Z
2020-09-03T02:35:38.6736200Z * Skip bundle test in bdist
2020-09-03T02:35:38.6739750Z
2020-09-03T02:35:38.6743280Z * Don't remove test_bundle in sdist for consistency with bdist
2020-09-03T02:35:38.6743950Z
2020-09-03T02:35:38.6749860Z * Update release notes
2020-09-03T02:35:38.6751410Z
2020-09-03T02:35:38.6753010Z * Remove inaccurate module docstring in test_bundle
2020-09-03T02:35:38.6885830Z ##[group]Run actions/setup-python@v2
2020-09-03T02:35:38.6886120Z with:
2020-09-03T02:35:38.6886240Z python-version: 3.8
2020-09-03T02:35:38.6886580Z token: ***
2020-09-03T02:35:38.6886720Z env:
2020-09-03T02:35:38.6886980Z GITHUB_TOKEN: ***
2020-09-03T02:35:38.6887160Z DISPLAY: :99.0
2020-09-03T02:35:38.6887310Z ##[endgroup]
2020-09-03T02:35:38.7740470Z Successfully setup CPython (3.8.5)
2020-09-03T02:35:38.7831450Z ##[group]Run python -m pip install --upgrade pip
2020-09-03T02:35:38.7832040Z �[36;1mpython -m pip install --upgrade pip �[0m
2020-09-03T02:35:38.7832410Z �[36;1mpython -m pip install briefcase==0.3.1 tomlkit wheel�[0m
2020-09-03T02:35:38.7832710Z �[36;1mpython -m pip install -e .[pyside2]�[0m
2020-09-03T02:35:38.8073350Z shell: /bin/bash -e {0}
2020-09-03T02:35:38.8073570Z env:
2020-09-03T02:35:38.8074060Z GITHUB_TOKEN: ***
2020-09-03T02:35:38.8074250Z DISPLAY: :99.0
2020-09-03T02:35:38.8074390Z pythonLocation: /Users/runner/hostedtoolcache/Python/3.8.5/x64
2020-09-03T02:35:38.8074560Z ##[endgroup]
2020-09-03T02:35:41.0869480Z Requirement already up-to-date: pip in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (20.2.2)
2020-09-03T02:35:41.5928780Z Collecting briefcase==0.3.1
2020-09-03T02:35:41.6138770Z Downloading briefcase-0.3.1-py2.py3-none-any.whl (77 kB)
2020-09-03T02:35:41.7985320Z Collecting tomlkit
2020-09-03T02:35:41.8052800Z Downloading tomlkit-0.7.0-py2.py3-none-any.whl (32 kB)
2020-09-03T02:35:42.0119920Z Collecting wheel
2020-09-03T02:35:42.0173290Z Downloading wheel-0.35.1-py2.py3-none-any.whl (33 kB)
2020-09-03T02:35:42.1233540Z Collecting requests>=2.22.0
2020-09-03T02:35:42.1285950Z Downloading requests-2.24.0-py2.py3-none-any.whl (61 kB)
2020-09-03T02:35:42.1657480Z Collecting dmgbuild>=1.3.3; sys_platform == "darwin"
2020-09-03T02:35:42.1726680Z Downloading dmgbuild-1.3.3.tar.gz (35 kB)
2020-09-03T02:35:43.1080990Z Requirement already satisfied: setuptools>=45 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from briefcase==0.3.1) (47.1.0)
2020-09-03T02:35:43.2251660Z Collecting GitPython>=3.0.8
2020-09-03T02:35:43.2300760Z Downloading GitPython-3.1.7-py3-none-any.whl (158 kB)
2020-09-03T02:35:43.3256020Z Collecting Jinja2<3.0
2020-09-03T02:35:43.3305500Z Downloading Jinja2-2.11.2-py2.py3-none-any.whl (125 kB)
2020-09-03T02:35:43.3658170Z Collecting toml>=0.10.0
2020-09-03T02:35:43.3705810Z Downloading toml-0.10.1-py2.py3-none-any.whl (19 kB)
2020-09-03T02:35:43.3772210Z Requirement already satisfied: pip>=20 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from briefcase==0.3.1) (20.2.2)
2020-09-03T02:35:43.4486540Z Collecting cookiecutter>=1.0
2020-09-03T02:35:43.4536090Z Downloading cookiecutter-1.7.2-py2.py3-none-any.whl (34 kB)
2020-09-03T02:35:43.5961400Z Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
2020-09-03T02:35:43.6014240Z Downloading urllib3-1.25.10-py2.py3-none-any.whl (127 kB)
2020-09-03T02:35:43.7132810Z Collecting certifi>=2017.4.17
2020-09-03T02:35:43.7184670Z Downloading certifi-2020.6.20-py2.py3-none-any.whl (156 kB)
2020-09-03T02:35:43.7781270Z Collecting idna<3,>=2.5
2020-09-03T02:35:43.7831140Z Downloading idna-2.10-py2.py3-none-any.whl (58 kB)
2020-09-03T02:35:43.8209240Z Collecting chardet<4,>=3.0.2
2020-09-03T02:35:43.8267860Z Downloading chardet-3.0.4-py2.py3-none-any.whl (133 kB)
2020-09-03T02:35:43.8493490Z Collecting ds_store>=1.1.0
2020-09-03T02:35:43.8549370Z Downloading ds_store-1.1.2.tar.gz (13 kB)
2020-09-03T02:35:44.1835290Z Collecting mac_alias>=2.0.1
2020-09-03T02:35:44.1886830Z Downloading mac_alias-2.0.7.tar.gz (17 kB)
2020-09-03T02:35:44.5678940Z Collecting gitdb<5,>=4.0.1
2020-09-03T02:35:44.5680690Z Downloading gitdb-4.0.5-py3-none-any.whl (63 kB)
2020-09-03T02:35:44.6664200Z Collecting MarkupSafe>=0.23
2020-09-03T02:35:44.6665300Z Downloading MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl (16 kB)
2020-09-03T02:35:44.6909520Z Collecting binaryornot>=0.4.4
2020-09-03T02:35:44.6910820Z Downloading binaryornot-0.4.4-py2.py3-none-any.whl (9.0 kB)
2020-09-03T02:35:44.7514590Z Collecting click>=7.0
2020-09-03T02:35:44.7560730Z Downloading click-7.1.2-py2.py3-none-any.whl (82 kB)
2020-09-03T02:35:44.7892470Z Collecting poyo>=0.5.0
2020-09-03T02:35:44.7943210Z Downloading poyo-0.5.0-py2.py3-none-any.whl (10 kB)
2020-09-03T02:35:44.8496410Z Collecting six>=1.10
2020-09-03T02:35:44.8550860Z Downloading six-1.15.0-py2.py3-none-any.whl (10 kB)
2020-09-03T02:35:44.8851910Z Collecting python-slugify>=4.0.0
2020-09-03T02:35:44.8901340Z Downloading python-slugify-4.0.1.tar.gz (11 kB)
2020-09-03T02:35:45.2315810Z Collecting jinja2-time>=0.2.0
2020-09-03T02:35:45.2373350Z Downloading jinja2_time-0.2.0-py2.py3-none-any.whl (6.4 kB)
2020-09-03T02:35:45.2688950Z Collecting biplist>=0.6
2020-09-03T02:35:45.2746840Z Downloading biplist-1.0.3.tar.gz (21 kB)
2020-09-03T02:35:45.5845900Z Collecting smmap<4,>=3.0.1
2020-09-03T02:35:45.5894820Z Downloading smmap-3.0.4-py2.py3-none-any.whl (25 kB)
2020-09-03T02:35:45.6155270Z Collecting text-unidecode>=1.3
2020-09-03T02:35:45.6199670Z Downloading text_unidecode-1.3-py2.py3-none-any.whl (78 kB)
2020-09-03T02:35:45.7391840Z Collecting arrow
2020-09-03T02:35:45.7458520Z Downloading arrow-0.16.0-py2.py3-none-any.whl (50 kB)
2020-09-03T02:35:45.8153890Z Collecting python-dateutil>=2.7.0
2020-09-03T02:35:45.8207070Z Downloading python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
2020-09-03T02:35:45.8344490Z Using legacy 'setup.py install' for dmgbuild, since package 'wheel' is not installed.
2020-09-03T02:35:45.8346380Z Using legacy 'setup.py install' for ds-store, since package 'wheel' is not installed.
2020-09-03T02:35:45.8349050Z Using legacy 'setup.py install' for mac-alias, since package 'wheel' is not installed.
2020-09-03T02:35:45.8352230Z Using legacy 'setup.py install' for python-slugify, since package 'wheel' is not installed.
2020-09-03T02:35:45.8354260Z Using legacy 'setup.py install' for biplist, since package 'wheel' is not installed.
2020-09-03T02:35:45.9156650Z Installing collected packages: urllib3, certifi, idna, chardet, requests, biplist, mac-alias, ds-store, dmgbuild, wheel, smmap, gitdb, GitPython, MarkupSafe, Jinja2, toml, binaryornot, click, poyo, six, text-unidecode, python-slugify, python-dateutil, arrow, jinja2-time, cookiecutter, briefcase, tomlkit
2020-09-03T02:35:46.3505810Z Running setup.py install for biplist: started
2020-09-03T02:35:46.7487230Z Running setup.py install for biplist: finished with status 'done'
2020-09-03T02:35:46.7526230Z Running setup.py install for mac-alias: started
2020-09-03T02:35:47.1032730Z Running setup.py install for mac-alias: finished with status 'done'
2020-09-03T02:35:47.1080670Z Running setup.py install for ds-store: started
2020-09-03T02:35:47.4754010Z Running setup.py install for ds-store: finished with status 'done'
2020-09-03T02:35:47.4811900Z Running setup.py install for dmgbuild: started
2020-09-03T02:35:47.8218300Z Running setup.py install for dmgbuild: finished with status 'done'
2020-09-03T02:35:48.2922210Z Running setup.py install for python-slugify: started
2020-09-03T02:35:48.6092530Z Running setup.py install for python-slugify: finished with status 'done'
2020-09-03T02:35:48.9318670Z Successfully installed GitPython-3.1.7 Jinja2-2.11.2 MarkupSafe-1.1.1 arrow-0.16.0 binaryornot-0.4.4 biplist-1.0.3 briefcase-0.3.1 certifi-2020.6.20 chardet-3.0.4 click-7.1.2 cookiecutter-1.7.2 dmgbuild-1.3.3 ds-store-1.1.2 gitdb-4.0.5 idna-2.10 jinja2-time-0.2.0 mac-alias-2.0.7 poyo-0.5.0 python-dateutil-2.8.1 python-slugify-4.0.1 requests-2.24.0 six-1.15.0 smmap-3.0.4 text-unidecode-1.3 toml-0.10.1 tomlkit-0.7.0 urllib3-1.25.10 wheel-0.35.1
2020-09-03T02:35:49.3839770Z Obtaining file:///Users/runner/work/napari/napari
2020-09-03T02:35:49.3884950Z Installing build dependencies: started
2020-09-03T02:35:51.5866800Z Installing build dependencies: finished with status 'done'
2020-09-03T02:35:51.5879770Z Getting requirements to build wheel: started
2020-09-03T02:35:51.8010820Z Getting requirements to build wheel: finished with status 'done'
2020-09-03T02:35:51.8071570Z Preparing wheel metadata: started
2020-09-03T02:35:52.5959450Z Preparing wheel metadata: finished with status 'done'
2020-09-03T02:35:52.6063910Z Collecting napari-svg>=0.1.3
2020-09-03T02:35:52.6282400Z Downloading napari-svg-0.1.3.tar.gz (10 kB)
2020-09-03T02:35:53.0577530Z Collecting tifffile>=2020.2.16
2020-09-03T02:35:53.0661460Z Downloading tifffile-2020.8.25-py3-none-any.whl (147 kB)
2020-09-03T02:35:53.2818280Z Collecting dask[array]>=2.1.0
2020-09-03T02:35:53.2882390Z Downloading dask-2.25.0-py3-none-any.whl (834 kB)
2020-09-03T02:35:53.4628060Z Collecting PyYAML>=5.1
2020-09-03T02:35:53.4704380Z Downloading PyYAML-5.3.1.tar.gz (269 kB)
2020-09-03T02:35:54.1771120Z Collecting ipykernel>=5.1.1
2020-09-03T02:35:54.1844200Z Downloading ipykernel-5.3.4-py3-none-any.whl (120 kB)
2020-09-03T02:35:54.2381270Z Collecting wrapt>=1.11.1
2020-09-03T02:35:54.2427580Z Downloading wrapt-1.12.1.tar.gz (27 kB)
2020-09-03T02:35:54.6142800Z Collecting imageio>=2.5.0
2020-09-03T02:35:54.6261740Z Downloading imageio-2.9.0-py3-none-any.whl (3.3 MB)
2020-09-03T02:35:54.7973860Z Collecting napari-plugin-engine>=0.1.5
2020-09-03T02:35:54.8033720Z Downloading napari_plugin_engine-0.1.7-py3-none-any.whl (32 kB)
2020-09-03T02:35:54.8744360Z Collecting typing-extensions
2020-09-03T02:35:54.8794440Z Downloading typing_extensions-3.7.4.3-py3-none-any.whl (22 kB)
2020-09-03T02:35:54.9207640Z Collecting cachey>=0.2.1
2020-09-03T02:35:54.9260600Z Downloading cachey-0.2.1-py3-none-any.whl (6.4 kB)
2020-09-03T02:35:54.9702460Z Collecting appdirs>=1.4.4
2020-09-03T02:35:54.9773340Z Downloading appdirs-1.4.4-py2.py3-none-any.whl (9.6 kB)
2020-09-03T02:35:55.0710530Z Collecting vispy>=0.6.4
2020-09-03T02:35:55.0850730Z Downloading vispy-0.6.4.tar.gz (13.3 MB)
2020-09-03T02:35:56.1570300Z Installing build dependencies: started
2020-09-03T02:37:01.8876360Z Installing build dependencies: still running...
2020-09-03T02:38:27.2709340Z Installing build dependencies: still running...
2020-09-03T02:39:30.6465630Z Installing build dependencies: still running...
2020-09-03T02:39:30.9861290Z Installing build dependencies: finished with status 'done'
2020-09-03T02:39:31.0295450Z Getting requirements to build wheel: started
2020-09-03T02:39:36.5437790Z Getting requirements to build wheel: finished with status 'done'
2020-09-03T02:39:36.6745300Z Preparing wheel metadata: started
2020-09-03T02:39:38.6056150Z Preparing wheel metadata: finished with status 'done'
2020-09-03T02:39:39.6561220Z Collecting IPython>=7.7.0
2020-09-03T02:39:39.6691790Z Downloading ipython-7.18.1-py3-none-any.whl (786 kB)
2020-09-03T02:39:39.7791850Z Collecting toolz>=0.10.0
2020-09-03T02:39:39.7857070Z Downloading toolz-0.10.0.tar.gz (49 kB)
2020-09-03T02:39:40.7124980Z Collecting psutil>=5.0
2020-09-03T02:39:40.7260650Z Downloading psutil-5.7.2.tar.gz (460 kB)
2020-09-03T02:39:41.3298010Z Collecting PyOpenGL>=3.1.0
2020-09-03T02:39:41.3421680Z Downloading PyOpenGL-3.1.5-py3-none-any.whl (2.4 MB)
2020-09-03T02:39:41.9712930Z Collecting scipy>=1.2.0
2020-09-03T02:39:41.9783940Z Downloading scipy-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl (28.9 MB)
2020-09-03T02:39:43.5568630Z Collecting numpy>=1.10.0
2020-09-03T02:39:43.5623010Z Downloading numpy-1.19.1-cp38-cp38-macosx_10_9_x86_64.whl (15.3 MB)
2020-09-03T02:39:44.6446990Z Collecting Pillow!=7.1.0,!=7.1.1
2020-09-03T02:39:44.6680030Z Downloading Pillow-7.2.0-cp38-cp38-macosx_10_10_x86_64.whl (2.2 MB)
2020-09-03T02:39:44.8198150Z Collecting qtconsole>=4.5.1
2020-09-03T02:39:44.8291520Z Downloading qtconsole-4.7.6-py2.py3-none-any.whl (118 kB)
2020-09-03T02:39:44.8957160Z Collecting numpydoc>=0.9.2
2020-09-03T02:39:44.9031110Z Downloading numpydoc-1.1.0-py3-none-any.whl (47 kB)
2020-09-03T02:39:44.9841230Z Collecting qtpy>=1.7.0
2020-09-03T02:39:44.9902560Z Downloading QtPy-1.9.0-py2.py3-none-any.whl (54 kB)
2020-09-03T02:39:45.1331090Z Collecting PySide2<5.15.0,>=5.12.3; extra == "pyside2"
2020-09-03T02:39:45.1386120Z Downloading PySide2-5.14.2.3-5.14.2-cp35.cp36.cp37.cp38-abi3-macosx_10_13_intel.whl (148.4 MB)
2020-09-03T02:39:48.4805020Z Collecting appnope; platform_system == "Darwin"
2020-09-03T02:39:48.4858480Z Downloading appnope-0.1.0-py2.py3-none-any.whl (4.0 kB)
2020-09-03T02:39:48.5643600Z Collecting traitlets>=4.1.0
2020-09-03T02:39:48.5706200Z Downloading traitlets-5.0.2-py3-none-any.whl (97 kB)
2020-09-03T02:39:48.7216510Z Collecting jupyter-client
2020-09-03T02:39:48.7291560Z Downloading jupyter_client-6.1.7-py3-none-any.whl (108 kB)
2020-09-03T02:39:48.8812600Z Collecting tornado>=4.2
2020-09-03T02:39:48.8883320Z Downloading tornado-6.0.4.tar.gz (496 kB)
2020-09-03T02:39:49.4468880Z Collecting heapdict
2020-09-03T02:39:49.4552440Z Downloading HeapDict-1.0.1-py3-none-any.whl (3.9 kB)
2020-09-03T02:39:49.6193680Z Collecting freetype-py
2020-09-03T02:39:49.6362890Z Downloading freetype_py-2.2.0-py3-none-macosx_10_9_x86_64.whl (852 kB)
2020-09-03T02:39:49.6705490Z Requirement already satisfied: setuptools>=18.5 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from IPython>=7.7.0->napari==0.3.7rc3) (47.1.0)
2020-09-03T02:39:49.7222380Z Collecting backcall
2020-09-03T02:39:49.7281270Z Downloading backcall-0.2.0-py2.py3-none-any.whl (11 kB)
2020-09-03T02:39:49.7870510Z Collecting pexpect>4.3; sys_platform != "win32"
2020-09-03T02:39:49.7935350Z Downloading pexpect-4.8.0-py2.py3-none-any.whl (59 kB)
2020-09-03T02:39:49.9224170Z Collecting decorator
2020-09-03T02:39:49.9276780Z Downloading decorator-4.4.2-py2.py3-none-any.whl (9.2 kB)
2020-09-03T02:39:50.0503800Z Collecting pygments
2020-09-03T02:39:50.0611070Z Downloading Pygments-2.6.1-py3-none-any.whl (914 kB)
2020-09-03T02:39:50.1457410Z Collecting pickleshare
2020-09-03T02:39:50.1507220Z Downloading pickleshare-0.7.5-py2.py3-none-any.whl (6.9 kB)
2020-09-03T02:39:50.2731670Z Collecting jedi>=0.10
2020-09-03T02:39:50.2834900Z Downloading jedi-0.17.2-py2.py3-none-any.whl (1.4 MB)
2020-09-03T02:39:50.5139430Z Collecting prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0
2020-09-03T02:39:50.5212690Z Downloading prompt_toolkit-3.0.7-py3-none-any.whl (355 kB)
2020-09-03T02:39:50.6399960Z Collecting jupyter-core
2020-09-03T02:39:50.6461750Z Downloading jupyter_core-4.6.3-py2.py3-none-any.whl (83 kB)
2020-09-03T02:39:51.1129460Z Collecting pyzmq>=17.1
2020-09-03T02:39:51.1214760Z Downloading pyzmq-19.0.2-cp38-cp38-macosx_10_9_x86_64.whl (806 kB)
2020-09-03T02:39:51.1818800Z Collecting ipython-genutils
2020-09-03T02:39:51.1876600Z Downloading ipython_genutils-0.2.0-py2.py3-none-any.whl (26 kB)
2020-09-03T02:39:51.4711260Z Collecting sphinx>=1.6.5
2020-09-03T02:39:51.4831460Z Downloading Sphinx-3.2.1-py3-none-any.whl (2.9 MB)
2020-09-03T02:39:51.5902800Z Requirement already satisfied: Jinja2>=2.3 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from numpydoc>=0.9.2->napari==0.3.7rc3) (2.11.2)
2020-09-03T02:39:51.6749000Z Collecting shiboken2==5.14.2.3
2020-09-03T02:39:51.6859180Z Downloading shiboken2-5.14.2.3-5.14.2-cp35.cp36.cp37.cp38-abi3-macosx_10_13_intel.whl (837 kB)
2020-09-03T02:39:51.7200960Z Requirement already satisfied: python-dateutil>=2.1 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from jupyter-client->ipykernel>=5.1.1->napari==0.3.7rc3) (2.8.1)
2020-09-03T02:39:51.7554890Z Collecting ptyprocess>=0.5
2020-09-03T02:39:51.7620090Z Downloading ptyprocess-0.6.0-py2.py3-none-any.whl (39 kB)
2020-09-03T02:39:51.8849670Z Collecting parso<0.8.0,>=0.7.0
2020-09-03T02:39:51.8909480Z Downloading parso-0.7.1-py2.py3-none-any.whl (109 kB)
2020-09-03T02:39:51.9804230Z Collecting wcwidth
2020-09-03T02:39:51.9861070Z Downloading wcwidth-0.2.5-py2.py3-none-any.whl (30 kB)
2020-09-03T02:39:52.0375900Z Collecting sphinxcontrib-qthelp
2020-09-03T02:39:52.0428640Z Downloading sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl (90 kB)
2020-09-03T02:39:52.1618700Z Collecting babel>=1.3
2020-09-03T02:39:52.1683090Z Downloading Babel-2.8.0-py2.py3-none-any.whl (8.6 MB)
2020-09-03T02:39:52.5541610Z Collecting packaging
2020-09-03T02:39:52.5609070Z Downloading packaging-20.4-py2.py3-none-any.whl (37 kB)
2020-09-03T02:39:52.6074590Z Collecting snowballstemmer>=1.1
2020-09-03T02:39:52.6127860Z Downloading snowballstemmer-2.0.0-py2.py3-none-any.whl (97 kB)
2020-09-03T02:39:52.6574180Z Collecting sphinxcontrib-jsmath
2020-09-03T02:39:52.6629360Z Downloading sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl (5.1 kB)
2020-09-03T02:39:52.7290580Z Collecting docutils>=0.12
2020-09-03T02:39:52.7368660Z Downloading docutils-0.16-py2.py3-none-any.whl (548 kB)
2020-09-03T02:39:52.7973870Z Collecting sphinxcontrib-devhelp
2020-09-03T02:39:52.8058340Z Downloading sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl (84 kB)
2020-09-03T02:39:52.9085150Z Collecting imagesize
2020-09-03T02:39:52.9162720Z Downloading imagesize-1.2.0-py2.py3-none-any.whl (4.8 kB)
2020-09-03T02:39:52.9823670Z Collecting alabaster<0.8,>=0.7
2020-09-03T02:39:52.9876830Z Downloading alabaster-0.7.12-py2.py3-none-any.whl (14 kB)
2020-09-03T02:39:53.0473690Z Collecting sphinxcontrib-serializinghtml
2020-09-03T02:39:53.0544940Z Downloading sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl (89 kB)
2020-09-03T02:39:53.1053410Z Collecting sphinxcontrib-applehelp
2020-09-03T02:39:53.1118850Z Downloading sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl (121 kB)
2020-09-03T02:39:53.1658680Z Collecting sphinxcontrib-htmlhelp
2020-09-03T02:39:53.1714570Z Downloading sphinxcontrib_htmlhelp-1.0.3-py2.py3-none-any.whl (96 kB)
2020-09-03T02:39:53.1981710Z Requirement already satisfied: requests>=2.5.0 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from sphinx>=1.6.5->numpydoc>=0.9.2->napari==0.3.7rc3) (2.24.0)
2020-09-03T02:39:53.2155500Z Requirement already satisfied: MarkupSafe>=0.23 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from Jinja2>=2.3->numpydoc>=0.9.2->napari==0.3.7rc3) (1.1.1)
2020-09-03T02:39:53.2170660Z Requirement already satisfied: six>=1.5 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from python-dateutil>=2.1->jupyter-client->ipykernel>=5.1.1->napari==0.3.7rc3) (1.15.0)
2020-09-03T02:39:53.5410330Z Collecting pytz>=2015.7
2020-09-03T02:39:53.5558830Z Downloading pytz-2020.1-py2.py3-none-any.whl (510 kB)
2020-09-03T02:39:53.8194700Z Collecting pyparsing>=2.0.2
2020-09-03T02:39:53.8336310Z Downloading pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
2020-09-03T02:39:53.8462500Z Requirement already satisfied: certifi>=2017.4.17 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from requests>=2.5.0->sphinx>=1.6.5->numpydoc>=0.9.2->napari==0.3.7rc3) (2020.6.20)
2020-09-03T02:39:53.8474410Z Requirement already satisfied: chardet<4,>=3.0.2 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from requests>=2.5.0->sphinx>=1.6.5->numpydoc>=0.9.2->napari==0.3.7rc3) (3.0.4)
2020-09-03T02:39:53.8490220Z Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from requests>=2.5.0->sphinx>=1.6.5->numpydoc>=0.9.2->napari==0.3.7rc3) (1.25.10)
2020-09-03T02:39:53.8657670Z Requirement already satisfied: idna<3,>=2.5 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from requests>=2.5.0->sphinx>=1.6.5->numpydoc>=0.9.2->napari==0.3.7rc3) (2.10)
2020-09-03T02:39:53.8683200Z Building wheels for collected packages: napari-svg, PyYAML, wrapt, vispy, toolz, psutil, tornado
2020-09-03T02:39:53.8692310Z Building wheel for napari-svg (setup.py): started
2020-09-03T02:39:54.2972390Z Building wheel for napari-svg (setup.py): finished with status 'done'
2020-09-03T02:39:54.2981920Z Created wheel for napari-svg: filename=napari_svg-0.1.3-py3-none-any.whl size=11972 sha256=b56b3fc5f59ea032cdafa816c21a75e8413e34e29d8bdaf55f7207fef867fd73
2020-09-03T02:39:54.2982900Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/96/12/d9/a0a4a8914067f0ab668322ba07270d05475000398c3530ff9a
2020-09-03T02:39:54.3005750Z Building wheel for PyYAML (setup.py): started
2020-09-03T02:40:01.8259850Z Building wheel for PyYAML (setup.py): finished with status 'done'
2020-09-03T02:40:01.8274300Z Created wheel for PyYAML: filename=PyYAML-5.3.1-cp38-cp38-macosx_10_14_x86_64.whl size=156398 sha256=89b1292cba9ce9e0d2cca6536afce3524cb40d858434046a4c63ee9a586b26b6
2020-09-03T02:40:01.8274640Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/13/90/db/290ab3a34f2ef0b5a0f89235dc2d40fea83e77de84ed2dc05c
2020-09-03T02:40:01.8294650Z Building wheel for wrapt (setup.py): started
2020-09-03T02:40:03.1518600Z Building wheel for wrapt (setup.py): finished with status 'done'
2020-09-03T02:40:03.1528330Z Created wheel for wrapt: filename=wrapt-1.12.1-cp38-cp38-macosx_10_14_x86_64.whl size=32551 sha256=a88ea091adec50c531d211172c22cd63f56e1773a3dbae6e7f466b2e2f65f47b
2020-09-03T02:40:03.1528680Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/5f/fd/9e/b6cf5890494cb8ef0b5eaff72e5d55a70fb56316007d6dfe73
2020-09-03T02:40:03.1570890Z Building wheel for vispy (PEP 517): started
2020-09-03T02:40:07.1746870Z Building wheel for vispy (PEP 517): finished with status 'done'
2020-09-03T02:40:07.1839520Z Created wheel for vispy: filename=vispy-0.6.4-cp38-cp38-macosx_10_14_x86_64.whl size=2241226 sha256=bca6969890582429a938dabdd7308e977084aac5c99b3a707251e9ab8e468125
2020-09-03T02:40:07.1840430Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/60/f9/ba/c802b3bb175c9f158667b47f71be5aab048f25cb5c46c69646
2020-09-03T02:40:07.1856340Z Building wheel for toolz (setup.py): started
2020-09-03T02:40:07.7105510Z Building wheel for toolz (setup.py): finished with status 'done'
2020-09-03T02:40:07.7118680Z Created wheel for toolz: filename=toolz-0.10.0-py3-none-any.whl size=55576 sha256=3397b54c3ad0cff79496927025731eaa738666d16c1dc747f8164357b05adfa5
2020-09-03T02:40:07.7118900Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/a5/2b/b5/05758d5828d65f2adef8fbb5d5484e4adb946ae1827a973a01
2020-09-03T02:40:07.7135860Z Building wheel for psutil (setup.py): started
2020-09-03T02:40:10.0479350Z Building wheel for psutil (setup.py): finished with status 'done'
2020-09-03T02:40:10.0498990Z Created wheel for psutil: filename=psutil-5.7.2-cp38-cp38-macosx_10_14_x86_64.whl size=234086 sha256=1900da33cbe77f7b739fec92353c49665a7eab652259fcf5f5e9bc932b5e0263
2020-09-03T02:40:10.0499240Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/91/cf/b0/0c9998060b55ca80ea7a50a8639c3bdc6ba886eeff014bc9ac
2020-09-03T02:40:10.0515840Z Building wheel for tornado (setup.py): started
2020-09-03T02:40:10.9626160Z Building wheel for tornado (setup.py): finished with status 'done'
2020-09-03T02:40:11.7233010Z Created wheel for tornado: filename=tornado-6.0.4-cp38-cp38-macosx_10_14_x86_64.whl size=417088 sha256=9304a9ebfd922a8bbb0ce05dc8e339ba5ec60c70aa60ef3c28f15208a01d71e3
2020-09-03T02:40:11.7233300Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/88/79/e5/598ba17e85eccf2626eab62e4ee8452895636cd542650d450d
2020-09-03T02:40:11.7233840Z Successfully built napari-svg PyYAML wrapt vispy toolz psutil tornado
2020-09-03T02:40:11.7234990Z Installing collected packages: Pillow, numpy, imageio, napari-plugin-engine, freetype-py, vispy, napari-svg, tifffile, PyYAML, toolz, dask, appnope, ipython-genutils, traitlets, pyzmq, tornado, jupyter-core, jupyter-client, backcall, ptyprocess, pexpect, decorator, pygments, pickleshare, parso, jedi, wcwidth, prompt-toolkit, IPython, ipykernel, wrapt, typing-extensions, heapdict, cachey, appdirs, psutil, PyOpenGL, scipy, qtpy, qtconsole, sphinxcontrib-qthelp, pytz, babel, pyparsing, packaging, snowballstemmer, sphinxcontrib-jsmath, docutils, sphinxcontrib-devhelp, imagesize, alabaster, sphinxcontrib-serializinghtml, sphinxcontrib-applehelp, sphinxcontrib-htmlhelp, sphinx, numpydoc, shiboken2, PySide2, napari
2020-09-03T02:40:34.4527210Z Running setup.py develop for napari
2020-09-03T02:40:35.3975740Z Successfully installed IPython-7.18.1 Pillow-7.2.0 PyOpenGL-3.1.5 PySide2-5.14.2.3 PyYAML-5.3.1 alabaster-0.7.12 appdirs-1.4.4 appnope-0.1.0 babel-2.8.0 backcall-0.2.0 cachey-0.2.1 dask-2.25.0 decorator-4.4.2 docutils-0.16 freetype-py-2.2.0 heapdict-1.0.1 imageio-2.9.0 imagesize-1.2.0 ipykernel-5.3.4 ipython-genutils-0.2.0 jedi-0.17.2 jupyter-client-6.1.7 jupyter-core-4.6.3 napari napari-plugin-engine-0.1.7 napari-svg-0.1.3 numpy-1.19.1 numpydoc-1.1.0 packaging-20.4 parso-0.7.1 pexpect-4.8.0 pickleshare-0.7.5 prompt-toolkit-3.0.7 psutil-5.7.2 ptyprocess-0.6.0 pygments-2.6.1 pyparsing-2.4.7 pytz-2020.1 pyzmq-19.0.2 qtconsole-4.7.6 qtpy-1.9.0 scipy-1.5.2 shiboken2-5.14.2.3 snowballstemmer-2.0.0 sphinx-3.2.1 sphinxcontrib-applehelp-1.0.2 sphinxcontrib-devhelp-1.0.2 sphinxcontrib-htmlhelp-1.0.3 sphinxcontrib-jsmath-1.0.1 sphinxcontrib-qthelp-1.0.3 sphinxcontrib-serializinghtml-1.1.4 tifffile-2020.8.25 toolz-0.10.0 tornado-6.0.4 traitlets-5.0.2 typing-extensions-3.7.4.3 vispy-0.6.4 wcwidth-0.2.5 wrapt-1.12.1
2020-09-03T02:40:36.2706120Z ##[group]Run VER=`python bundle.py --version`
2020-09-03T02:40:36.2706390Z �[36;1mVER=`python bundle.py --version`�[0m
2020-09-03T02:40:36.2706500Z �[36;1mecho "::set-env name=version::$VER"�[0m
2020-09-03T02:40:36.2706600Z �[36;1mecho $VER�[0m
2020-09-03T02:40:36.3044610Z shell: /bin/bash --noprofile --norc -e -o pipefail {0}
2020-09-03T02:40:36.3045170Z env:
2020-09-03T02:40:36.3046590Z GITHUB_TOKEN: ***
2020-09-03T02:40:36.3046710Z DISPLAY: :99.0
2020-09-03T02:40:36.3046830Z pythonLocation: /Users/runner/hostedtoolcache/Python/3.8.5/x64
2020-09-03T02:40:36.3046930Z ##[endgroup]
2020-09-03T02:40:36.3943780Z 0.3.7rc3
2020-09-03T02:40:36.3966720Z ##[group]Run python -m bundle
2020-09-03T02:40:36.3966910Z �[36;1mpython -m bundle�[0m
2020-09-03T02:40:36.4130090Z shell: /bin/bash -e {0}
2020-09-03T02:40:36.4130240Z env:
2020-09-03T02:40:36.4130750Z GITHUB_TOKEN: ***
2020-09-03T02:40:36.4130920Z DISPLAY: :99.0
2020-09-03T02:40:36.4131020Z pythonLocation: /Users/runner/hostedtoolcache/Python/3.8.5/x64
2020-09-03T02:40:36.4131150Z version: 0.3.7rc3
2020-09-03T02:40:36.4131280Z ##[endgroup]
2020-09-03T02:40:45.0358100Z Unable to create basic Accelerated OpenGL renderer.
2020-09-03T02:40:45.0360730Z Unable to create basic Accelerated OpenGL renderer.
2020-09-03T02:40:45.0364170Z Core Image is now using the software OpenGL renderer. This will be slow.
2020-09-03T02:40:45.3454630Z napari: 0.3.7rc3
2020-09-03T02:40:45.3455880Z Platform: macOS-10.15.6-x86_64-i386-64bit
2020-09-03T02:40:45.3456780Z Python: 3.8.5 (default, Jul 21 2020, 12:20:54) [Clang 11.0.0 (clang-1100.0.33.17)]
2020-09-03T02:40:45.3457010Z Qt: 5.14.2
2020-09-03T02:40:45.3457260Z PySide2: 5.14.2.3
2020-09-03T02:40:45.3457410Z NumPy: 1.19.1
2020-09-03T02:40:45.3457540Z SciPy: 1.5.2
2020-09-03T02:40:45.3457670Z Dask: 2.25.0
2020-09-03T02:40:45.3457800Z VisPy: 0.6.4
2020-09-03T02:40:45.3457860Z
2020-09-03T02:40:45.3458510Z GL version: 2.1 APPLE-17.10.22
2020-09-03T02:40:45.3458680Z MAX_TEXTURE_SIZE: 16384
2020-09-03T02:40:45.3458740Z
2020-09-03T02:40:45.3458870Z Plugins:
2020-09-03T02:40:45.3459780Z - napari_plugin_engine: 0.1.7
2020-09-03T02:40:45.3460400Z - svg: 0.1.3
2020-09-03T02:40:47.9385040Z
2020-09-03T02:40:47.9451560Z [napari] Generating application template...
2020-09-03T02:40:47.9453400Z Using app template: https://github.com/beeware/briefcase-macOS-app-template.git
2020-09-03T02:40:47.9567460Z
2020-09-03T02:40:47.9587980Z [napari] Installing support package...
2020-09-03T02:40:47.9620720Z Using support package https://briefcase-support.org/python?platform=macOS&version=3.8
2020-09-03T02:40:47.9625140Z ... using most recent revision
2020-09-03T02:40:47.9631410Z Downloading Python-3.8-macOS-support.b3.tar.gz...
2020-09-03T02:40:47.9631640Z
2020-09-03T02:40:48.0113290Z ######............................................ 12%
2020-09-03T02:40:48.0829080Z #############..................................... 26%
2020-09-03T02:40:48.1490490Z ####################.............................. 40%
2020-09-03T02:40:48.1563150Z ###########################....................... 54%
2020-09-03T02:40:48.2216300Z #################################................. 66%
2020-09-03T02:40:48.2273640Z ########################################.......... 80%
2020-09-03T02:40:48.2316450Z ###############################################... 94%
2020-09-03T02:40:49.1551270Z ################################################## 100%Ignoring importlib-metadata: markers 'python_version < "3.8"' don't match your environment
2020-09-03T02:40:49.4399680Z Collecting appdirs>=1.4.4
2020-09-03T02:40:49.4474270Z Using cached appdirs-1.4.4-py2.py3-none-any.whl (9.6 kB)
2020-09-03T02:40:49.4744440Z Collecting cachey>=0.2.1
2020-09-03T02:40:49.4777690Z Using cached cachey-0.2.1-py3-none-any.whl (6.4 kB)
2020-09-03T02:40:49.6660720Z Collecting dask[array]>=2.1.0
2020-09-03T02:40:49.6721850Z Using cached dask-2.25.0-py3-none-any.whl (834 kB)
2020-09-03T02:40:49.7635230Z Collecting imageio>=2.5.0
2020-09-03T02:40:49.7767750Z Using cached imageio-2.9.0-py3-none-any.whl (3.3 MB)
2020-09-03T02:40:49.9107170Z Collecting ipykernel>=5.1.1
2020-09-03T02:40:49.9146880Z Using cached ipykernel-5.3.4-py3-none-any.whl (120 kB)
2020-09-03T02:40:50.0892930Z Collecting IPython>=7.7.0
2020-09-03T02:40:50.0937160Z Using cached ipython-7.18.1-py3-none-any.whl (786 kB)
2020-09-03T02:40:50.1820240Z Collecting napari-plugin-engine>=0.1.5
2020-09-03T02:40:50.1860010Z Using cached napari_plugin_engine-0.1.7-py3-none-any.whl (32 kB)
2020-09-03T02:40:50.2133050Z Processing /Users/runner/Library/Caches/pip/wheels/96/12/d9/a0a4a8914067f0ab668322ba07270d05475000398c3530ff9a/napari_svg-0.1.3-py3-none-any.whl
2020-09-03T02:40:51.2184750Z Collecting numpy>=1.10.0
2020-09-03T02:40:51.2742670Z Using cached numpy-1.19.1-cp38-cp38-macosx_10_9_x86_64.whl (15.3 MB)
2020-09-03T02:40:51.4277760Z Collecting numpydoc>=0.9.2
2020-09-03T02:40:51.4312250Z Using cached numpydoc-1.1.0-py3-none-any.whl (47 kB)
2020-09-03T02:40:52.3438360Z Collecting Pillow!=7.1.0,!=7.1.1
2020-09-03T02:40:52.3550660Z Using cached Pillow-7.2.0-cp38-cp38-macosx_10_10_x86_64.whl (2.2 MB)
2020-09-03T02:40:52.7859740Z Processing /Users/runner/Library/Caches/pip/wheels/91/cf/b0/0c9998060b55ca80ea7a50a8639c3bdc6ba886eeff014bc9ac/psutil-5.7.2-cp38-cp38-macosx_10_14_x86_64.whl
2020-09-03T02:40:52.8305390Z Collecting PyOpenGL>=3.1.0
2020-09-03T02:40:52.8412990Z Using cached PyOpenGL-3.1.5-py3-none-any.whl (2.4 MB)
2020-09-03T02:40:52.9781240Z Processing /Users/runner/Library/Caches/pip/wheels/13/90/db/290ab3a34f2ef0b5a0f89235dc2d40fea83e77de84ed2dc05c/PyYAML-5.3.1-cp38-cp38-macosx_10_14_x86_64.whl
2020-09-03T02:40:53.0518640Z Collecting qtconsole>=4.5.1
2020-09-03T02:40:53.0560750Z Using cached qtconsole-4.7.6-py2.py3-none-any.whl (118 kB)
2020-09-03T02:40:53.1125350Z Collecting qtpy>=1.7.0
2020-09-03T02:40:53.1163800Z Using cached QtPy-1.9.0-py2.py3-none-any.whl (54 kB)
2020-09-03T02:40:53.6262670Z Collecting scipy>=1.2.0
2020-09-03T02:40:53.7264920Z Using cached scipy-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl (28.9 MB)
2020-09-03T02:40:54.0692690Z Collecting tifffile>=2020.2.16
2020-09-03T02:40:54.0747200Z Using cached tifffile-2020.8.25-py3-none-any.whl (147 kB)
2020-09-03T02:40:54.1082030Z Processing /Users/runner/Library/Caches/pip/wheels/a5/2b/b5/05758d5828d65f2adef8fbb5d5484e4adb946ae1827a973a01/toolz-0.10.0-py3-none-any.whl
2020-09-03T02:40:54.1550280Z Collecting typing_extensions
2020-09-03T02:40:54.1589100Z Using cached typing_extensions-3.7.4.3-py3-none-any.whl (22 kB)
2020-09-03T02:40:54.2299530Z Processing /Users/runner/Library/Caches/pip/wheels/60/f9/ba/c802b3bb175c9f158667b47f71be5aab048f25cb5c46c69646/vispy-0.6.4-cp38-cp38-macosx_10_14_x86_64.whl
2020-09-03T02:40:54.2734070Z Processing /Users/runner/Library/Caches/pip/wheels/5f/fd/9e/b6cf5890494cb8ef0b5eaff72e5d55a70fb56316007d6dfe73/wrapt-1.12.1-cp38-cp38-macosx_10_14_x86_64.whl
2020-09-03T02:40:54.5394230Z Collecting pip
2020-09-03T02:40:54.5981030Z Using cached pip-20.2.2-py2.py3-none-any.whl (1.5 MB)
2020-09-03T02:40:54.7042890Z Collecting PySide2==5.14.2.2
2020-09-03T02:40:58.7199270Z Downloading PySide2-5.14.2.2-5.14.2-cp35.cp36.cp37.cp38-abi3-macosx_10_13_intel.whl (148.4 MB)
2020-09-03T02:41:01.9538070Z Collecting scikit-image
2020-09-03T02:41:01.9606190Z Downloading scikit_image-0.17.2-cp38-cp38-macosx_10_13_x86_64.whl (12.2 MB)
2020-09-03T02:41:02.3319000Z Collecting zarr
2020-09-03T02:41:02.3434310Z Downloading zarr-2.4.0.tar.gz (3.3 MB)
2020-09-03T02:41:07.0822370Z Collecting heapdict
2020-09-03T02:41:07.0860100Z Using cached HeapDict-1.0.1-py3-none-any.whl (3.9 kB)
2020-09-03T02:41:07.1871560Z Collecting traitlets>=4.1.0
2020-09-03T02:41:07.1911740Z Using cached traitlets-5.0.2-py3-none-any.whl (97 kB)
2020-09-03T02:41:07.3214260Z Collecting jupyter-client
2020-09-03T02:41:07.3257950Z Using cached jupyter_client-6.1.7-py3-none-any.whl (108 kB)
2020-09-03T02:41:07.3601900Z Collecting appnope; platform_system == "Darwin"
2020-09-03T02:41:07.3634710Z Using cached appnope-0.1.0-py2.py3-none-any.whl (4.0 kB)
2020-09-03T02:41:07.4391480Z Processing /Users/runner/Library/Caches/pip/wheels/88/79/e5/598ba17e85eccf2626eab62e4ee8452895636cd542650d450d/tornado-6.0.4-cp38-cp38-macosx_10_14_x86_64.whl
2020-09-03T02:41:07.5423950Z Collecting jedi>=0.10
2020-09-03T02:41:07.5498900Z Using cached jedi-0.17.2-py2.py3-none-any.whl (1.4 MB)
2020-09-03T02:41:07.6322640Z Collecting pickleshare
2020-09-03T02:41:07.6355580Z Using cached pickleshare-0.7.5-py2.py3-none-any.whl (6.9 kB)
2020-09-03T02:41:07.7332450Z Collecting pygments
2020-09-03T02:41:07.7373580Z Using cached Pygments-2.6.1-py3-none-any.whl (914 kB)
2020-09-03T02:41:07.7664890Z Collecting backcall
2020-09-03T02:41:07.7712570Z Using cached backcall-0.2.0-py2.py3-none-any.whl (11 kB)
2020-09-03T02:41:09.0442260Z Collecting setuptools>=18.5
2020-09-03T02:41:09.0485910Z Using cached setuptools-50.1.0-py3-none-any.whl (784 kB)
2020-09-03T02:41:09.1642870Z Collecting pexpect>4.3; sys_platform != "win32"
2020-09-03T02:41:09.1677040Z Using cached pexpect-4.8.0-py2.py3-none-any.whl (59 kB)
2020-09-03T02:41:09.3221890Z Collecting prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0
2020-09-03T02:41:09.3276940Z Using cached prompt_toolkit-3.0.7-py3-none-any.whl (355 kB)
2020-09-03T02:41:09.4480460Z Collecting decorator
2020-09-03T02:41:09.4514030Z Using cached decorator-4.4.2-py2.py3-none-any.whl (9.2 kB)
2020-09-03T02:41:09.7287840Z Collecting sphinx>=1.6.5
2020-09-03T02:41:09.7416880Z Using cached Sphinx-3.2.1-py3-none-any.whl (2.9 MB)
2020-09-03T02:41:09.9063450Z Collecting Jinja2>=2.3
2020-09-03T02:41:09.9102160Z Using cached Jinja2-2.11.2-py2.py3-none-any.whl (125 kB)
2020-09-03T02:41:09.9352940Z Collecting ipython-genutils
2020-09-03T02:41:09.9386200Z Using cached ipython_genutils-0.2.0-py2.py3-none-any.whl (26 kB)
2020-09-03T02:41:10.3573510Z Collecting pyzmq>=17.1
2020-09-03T02:41:10.3640020Z Using cached pyzmq-19.0.2-cp38-cp38-macosx_10_9_x86_64.whl (806 kB)
2020-09-03T02:41:10.4607710Z Collecting jupyter-core
2020-09-03T02:41:10.4641930Z Using cached jupyter_core-4.6.3-py2.py3-none-any.whl (83 kB)
2020-09-03T02:41:10.6073990Z Collecting freetype-py
2020-09-03T02:41:10.6130830Z Using cached freetype_py-2.2.0-py3-none-macosx_10_9_x86_64.whl (852 kB)
2020-09-03T02:41:10.6927560Z Collecting shiboken2==5.14.2.2
2020-09-03T02:41:10.7030870Z Downloading shiboken2-5.14.2.2-5.14.2-cp35.cp36.cp37.cp38-abi3-macosx_10_13_intel.whl (834 kB)
2020-09-03T02:41:10.7865420Z Collecting networkx>=2.0
2020-09-03T02:41:10.8021370Z Downloading networkx-2.5-py3-none-any.whl (1.6 MB)
2020-09-03T02:41:11.3343770Z Collecting matplotlib!=3.0.0,>=2.0.0
2020-09-03T02:41:11.3448330Z Downloading matplotlib-3.3.1-cp38-cp38-macosx_10_9_x86_64.whl (8.5 MB)
2020-09-03T02:41:11.7170830Z Collecting PyWavelets>=1.1.1
2020-09-03T02:41:11.7390120Z Downloading PyWavelets-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl (4.3 MB)
2020-09-03T02:41:11.8591060Z Collecting asciitree
2020-09-03T02:41:11.8645460Z Downloading asciitree-0.3.3.tar.gz (4.0 kB)
2020-09-03T02:41:12.2800200Z Collecting fasteners
2020-09-03T02:41:12.2880100Z Downloading fasteners-0.15-py2.py3-none-any.whl (23 kB)
2020-09-03T02:41:12.3425610Z Collecting numcodecs>=0.6.4
2020-09-03T02:41:12.3592890Z Downloading numcodecs-0.6.4.tar.gz (3.8 MB)
2020-09-03T02:41:16.2554350Z Collecting python-dateutil>=2.1
2020-09-03T02:41:16.2603290Z Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
2020-09-03T02:41:16.3085800Z Collecting parso<0.8.0,>=0.7.0
2020-09-03T02:41:16.3129770Z Using cached parso-0.7.1-py2.py3-none-any.whl (109 kB)
2020-09-03T02:41:16.3435910Z Collecting ptyprocess>=0.5
2020-09-03T02:41:16.3469830Z Using cached ptyprocess-0.6.0-py2.py3-none-any.whl (39 kB)
2020-09-03T02:41:16.4113450Z Collecting wcwidth
2020-09-03T02:41:16.4148380Z Using cached wcwidth-0.2.5-py2.py3-none-any.whl (30 kB)
2020-09-03T02:41:16.7047770Z Collecting requests>=2.5.0
2020-09-03T02:41:16.7082870Z Using cached requests-2.24.0-py2.py3-none-any.whl (61 kB)
2020-09-03T02:41:16.7957220Z Collecting babel>=1.3
2020-09-03T02:41:16.8235950Z Using cached Babel-2.8.0-py2.py3-none-any.whl (8.6 MB)
2020-09-03T02:41:16.9270840Z Collecting snowballstemmer>=1.1
2020-09-03T02:41:16.9305520Z Using cached snowballstemmer-2.0.0-py2.py3-none-any.whl (97 kB)
2020-09-03T02:41:16.9555860Z Collecting sphinxcontrib-devhelp
2020-09-03T02:41:16.9587650Z Using cached sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl (84 kB)
2020-09-03T02:41:17.0132190Z Collecting docutils>=0.12
2020-09-03T02:41:17.0179410Z Using cached docutils-0.16-py2.py3-none-any.whl (548 kB)
2020-09-03T02:41:17.0950950Z Collecting alabaster<0.8,>=0.7
2020-09-03T02:41:17.0986410Z Using cached alabaster-0.7.12-py2.py3-none-any.whl (14 kB)
2020-09-03T02:41:17.1204700Z Collecting sphinxcontrib-jsmath
2020-09-03T02:41:17.1236010Z Using cached sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl (5.1 kB)
2020-09-03T02:41:17.1656770Z Collecting imagesize
2020-09-03T02:41:17.1687770Z Using cached imagesize-1.2.0-py2.py3-none-any.whl (4.8 kB)
2020-09-03T02:41:17.2115070Z Collecting sphinxcontrib-serializinghtml
2020-09-03T02:41:17.2150660Z Using cached sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl (89 kB)
2020-09-03T02:41:17.2460570Z Collecting sphinxcontrib-applehelp
2020-09-03T02:41:17.2497690Z Using cached sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl (121 kB)
2020-09-03T02:41:17.2898000Z Collecting sphinxcontrib-qthelp
2020-09-03T02:41:17.2930700Z Using cached sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl (90 kB)
2020-09-03T02:41:17.4307010Z Collecting packaging
2020-09-03T02:41:17.4343600Z Using cached packaging-20.4-py2.py3-none-any.whl (37 kB)
2020-09-03T02:41:17.4660630Z Collecting sphinxcontrib-htmlhelp
2020-09-03T02:41:17.4694320Z Using cached sphinxcontrib_htmlhelp-1.0.3-py2.py3-none-any.whl (96 kB)
2020-09-03T02:41:17.5392560Z Collecting MarkupSafe>=0.23
2020-09-03T02:41:17.5426560Z Using cached MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl (16 kB)
2020-09-03T02:41:17.5867180Z Collecting certifi>=2020.06.20
2020-09-03T02:41:17.5906980Z Using cached certifi-2020.6.20-py2.py3-none-any.whl (156 kB)
2020-09-03T02:41:17.6490500Z Collecting kiwisolver>=1.0.1
2020-09-03T02:41:17.6552170Z Downloading kiwisolver-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl (60 kB)
2020-09-03T02:41:17.8285360Z Collecting pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.3
2020-09-03T02:41:17.8326130Z Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
2020-09-03T02:41:17.8551920Z Collecting cycler>=0.10
2020-09-03T02:41:17.8601540Z Downloading cycler-0.10.0-py2.py3-none-any.whl (6.5 kB)
2020-09-03T02:41:17.9439770Z Collecting six
2020-09-03T02:41:17.9474430Z Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
2020-09-03T02:41:17.9847280Z Collecting monotonic>=0.1
2020-09-03T02:41:17.9901560Z Downloading monotonic-1.5-py2.py3-none-any.whl (5.3 kB)
2020-09-03T02:41:18.1080580Z Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
2020-09-03T02:41:18.1114400Z Using cached urllib3-1.25.10-py2.py3-none-any.whl (127 kB)
2020-09-03T02:41:18.1688530Z Collecting idna<3,>=2.5
2020-09-03T02:41:18.1721260Z Using cached idna-2.10-py2.py3-none-any.whl (58 kB)
2020-09-03T02:41:18.2051430Z Collecting chardet<4,>=3.0.2
2020-09-03T02:41:18.2085000Z Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
2020-09-03T02:41:18.4482410Z Collecting pytz>=2015.7
2020-09-03T02:41:18.4533040Z Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
2020-09-03T02:41:18.4687840Z Building wheels for collected packages: zarr, asciitree, numcodecs
2020-09-03T02:41:18.4697280Z Building wheel for zarr (setup.py): started
2020-09-03T02:41:19.2775590Z Building wheel for zarr (setup.py): finished with status 'done'
2020-09-03T02:41:19.2792370Z Created wheel for zarr: filename=zarr-2.4.0-py3-none-any.whl size=127065 sha256=28f0b6b7ca3ac4b7576e535370aa122283c0065d8c890fc32a363680791169b9
2020-09-03T02:41:19.2793050Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/73/45/aa/2472a351a13ce1a2a4fe06149435fd6ffe37c31562037035f8
2020-09-03T02:41:19.2820360Z Building wheel for asciitree (setup.py): started
2020-09-03T02:41:19.7262010Z Building wheel for asciitree (setup.py): finished with status 'done'
2020-09-03T02:41:19.7269550Z Created wheel for asciitree: filename=asciitree-0.3.3-py3-none-any.whl size=5035 sha256=19dbca1fbbc1378ac4bae0b4fa8b23bdabb5cd4412caa4bcfd01fea29b260ce1
2020-09-03T02:41:19.7269880Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/a3/d7/75/19cd0d2a893cad4bb0b2b16dd572ad2916d19c0d5ee9612511
2020-09-03T02:41:19.7287890Z Building wheel for numcodecs (setup.py): started
2020-09-03T02:42:20.9372110Z Building wheel for numcodecs (setup.py): still running...
2020-09-03T02:43:11.6848020Z Building wheel for numcodecs (setup.py): finished with status 'done'
2020-09-03T02:43:11.6890190Z Created wheel for numcodecs: filename=numcodecs-0.6.4-cp38-cp38-macosx_10_14_x86_64.whl size=1207253 sha256=367f4cab4fd6ecd0c0687510215f9ebc8fa1b3a4f75ba547ca0f23528976b60c
2020-09-03T02:43:11.6890430Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/06/84/42/9947cf5ce52463b17e99e7c89be03025256fe4e1c534d184cb
2020-09-03T02:43:11.6902650Z Successfully built zarr asciitree numcodecs
2020-09-03T02:43:12.7127470Z Installing collected packages: appdirs, heapdict, cachey, PyYAML, numpy, toolz, dask, Pillow, imageio, parso, jedi, appnope, ipython-genutils, traitlets, pickleshare, pygments, backcall, setuptools, ptyprocess, pexpect, wcwidth, prompt-toolkit, decorator, IPython, six, python-dateutil, jupyter-core, pyzmq, tornado, jupyter-client, ipykernel, napari-plugin-engine, freetype-py, vispy, napari-svg, urllib3, idna, certifi, chardet, requests, pytz, babel, snowballstemmer, sphinxcontrib-devhelp, docutils, alabaster, sphinxcontrib-jsmath, imagesize, sphinxcontrib-serializinghtml, sphinxcontrib-applehelp, sphinxcontrib-qthelp, MarkupSafe, Jinja2, pyparsing, packaging, sphinxcontrib-htmlhelp, sphinx, numpydoc, psutil, PyOpenGL, qtpy, qtconsole, scipy, tifffile, typing-extensions, wrapt, pip, shiboken2, PySide2, networkx, kiwisolver, cycler, matplotlib, PyWavelets, scikit-image, asciitree, monotonic, fasteners, numcodecs, zarr
2020-09-03T02:43:46.0716570Z Successfully installed IPython-7.18.1 Jinja2-2.11.2 MarkupSafe-1.1.1 Pillow-7.2.0 PyOpenGL-3.1.5 PySide2-5.14.2.2 PyWavelets-1.1.1 PyYAML-5.3.1 alabaster-0.7.12 appdirs-1.4.4 appnope-0.1.0 asciitree-0.3.3 babel-2.8.0 backcall-0.2.0 cachey-0.2.1 certifi-2020.6.20 chardet-3.0.4 cycler-0.10.0 dask-2.25.0 decorator-4.4.2 docutils-0.16 fasteners-0.15 freetype-py-2.2.0 heapdict-1.0.1 idna-2.10 imageio-2.9.0 imagesize-1.2.0 ipykernel-5.3.4 ipython-genutils-0.2.0 jedi-0.17.2 jupyter-client-6.1.7 jupyter-core-4.6.3 kiwisolver-1.2.0 matplotlib-3.3.1 monotonic-1.5 napari-plugin-engine-0.1.7 napari-svg-0.1.3 networkx-2.5 numcodecs-0.6.4 numpy-1.19.1 numpydoc-1.1.0 packaging-20.4 parso-0.7.1 pexpect-4.8.0 pickleshare-0.7.5 pip-20.2.2 prompt-toolkit-3.0.7 psutil-5.7.2 ptyprocess-0.6.0 pygments-2.6.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 pyzmq-19.0.2 qtconsole-4.7.6 qtpy-1.9.0 requests-2.24.0 scikit-image-0.17.2 scipy-1.5.2 setuptools-50.1.0 shiboken2-5.14.2.2 six-1.15.0 snowballstemmer-2.0.0 sphinx-3.2.1 sphinxcontrib-applehelp-1.0.2 sphinxcontrib-devhelp-1.0.2 sphinxcontrib-htmlhelp-1.0.3 sphinxcontrib-jsmath-1.0.1 sphinxcontrib-qthelp-1.0.3 sphinxcontrib-serializinghtml-1.1.4 tifffile-2020.8.25 toolz-0.10.0 tornado-6.0.4 traitlets-5.0.2 typing-extensions-3.7.4.3 urllib3-1.25.10 vispy-0.6.4 wcwidth-0.2.5 wrapt-1.12.1 zarr-2.4.0
2020-09-03T02:43:47.9231030Z
2020-09-03T02:43:47.9231450Z Unpacking support package...
2020-09-03T02:43:47.9231530Z
2020-09-03T02:43:47.9231660Z [napari] Installing dependencies...
2020-09-03T02:43:47.9231730Z
2020-09-03T02:43:47.9232030Z [napari] Installing application code...
2020-09-03T02:43:47.9233380Z Installing napari...
2020-09-03T02:43:47.9233900Z
2020-09-03T02:43:47.9234310Z [napari] Installing application resources...
2020-09-03T02:43:47.9234600Z Installing resources/icon.icns as application icon...
2020-09-03T02:43:47.9234740Z
2020-09-03T02:43:47.9234960Z [napari] Created macOS/napari
2020-09-03T02:43:49.2999920Z
2020-09-03T02:43:49.3001460Z [napari] Built macOS/napari/napari.app
2020-09-03T02:43:59.3788940Z hdiutil: attach: WARNING: ignoring IDME options (obsolete)
2020-09-03T02:46:30.5785620Z hdiutil: detach: timeout for DiskArbitration expired
2020-09-03T02:46:30.5787360Z hdiutil: detach: drive not detached
2020-09-03T02:48:31.6296690Z hdiutil: detach: timeout for DiskArbitration expired
2020-09-03T02:48:31.6298410Z hdiutil: detach: drive not detached
2020-09-03T02:50:32.8600140Z hdiutil: detach: timeout for DiskArbitration expired
2020-09-03T02:50:32.8601770Z hdiutil: detach: drive not detached
2020-09-03T02:52:33.9782130Z hdiutil: detach: timeout for DiskArbitration expired
2020-09-03T02:52:33.9782560Z hdiutil: detach: drive not detached
2020-09-03T02:54:35.2440930Z hdiutil: detach: timeout for DiskArbitration expired
2020-09-03T02:54:35.2441760Z hdiutil: detach: drive not detached
2020-09-03T02:56:36.4797740Z hdiutil: detach: timeout for DiskArbitration expired
2020-09-03T02:56:36.4798020Z hdiutil: detach: drive not detached
2020-09-03T02:56:36.4811120Z
2020-09-03T02:56:36.4811350Z [napari] Building DMG...
2020-09-03T02:56:36.4823550Z Traceback (most recent call last):
2020-09-03T02:56:36.4826240Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/bin/briefcase", line 8, in <module>
2020-09-03T02:56:36.4832060Z sys.exit(main())
2020-09-03T02:56:36.4833530Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages/briefcase/__main__.py", line 11, in main
2020-09-03T02:56:36.4833740Z command(**options)
2020-09-03T02:56:36.4834410Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages/briefcase/commands/package.py", line 64, in __call__
2020-09-03T02:56:36.4834580Z state = self._package_app(app, update=update, **full_options(state, options))
2020-09-03T02:56:36.4835320Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages/briefcase/commands/package.py", line 41, in _package_app
2020-09-03T02:56:36.4835560Z state = self.package_app(app, **full_options(state, options))
2020-09-03T02:56:36.4836220Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages/briefcase/platforms/macOS/dmg.py", line 126, in package_app
2020-09-03T02:56:36.4836380Z self.dmgbuild.build_dmg(
2020-09-03T02:56:36.4836970Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages/dmgbuild/core.py", line 561, in build_dmg
2020-09-03T02:56:36.8156560Z ##[error] raise DMGError('Unable to detach device cleanly')
2020-09-03T02:56:36.8164970Z dmgbuild.core.DMGError: Unable to detach device cleanly
2020-09-03T02:56:37.2347440Z Traceback (most recent call last):
2020-09-03T02:56:37.2348770Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/runpy.py", line 194, in _run_module_as_main
2020-09-03T02:56:37.2349390Z return _run_code(code, main_globals, None,
2020-09-03T02:56:37.2350010Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/runpy.py", line 87, in _run_code
2020-09-03T02:56:37.2350320Z exec(code, run_globals)
2020-09-03T02:56:37.2350720Z File "/Users/runner/work/napari/napari/bundle.py", line 178, in <module>
2020-09-03T02:56:37.2352190Z print('created', bundle())
2020-09-03T02:56:37.2352520Z File "/Users/runner/work/napari/napari/bundle.py", line 159, in bundle
2020-09-03T02:56:37.2352760Z subprocess.check_call(cmd)
2020-09-03T02:56:37.2353080Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/subprocess.py", line 364, in check_call
2020-09-03T02:56:37.2353380Z raise CalledProcessError(retcode, cmd)
2020-09-03T02:56:37.2354370Z subprocess.CalledProcessError: Command '['briefcase', 'package', '--no-sign']' returned non-zero exit status 1.
2020-09-03T02:56:37.2354630Z patched dmgbuild.core
2020-09-03T02:56:37.2354890Z updating pyproject.toml to version: 0.3.7rc3
2020-09-03T02:56:37.2356010Z created site-packages at /Users/runner/work/napari/napari/macOS/napari/napari.app/Contents/Resources/Support/lib/python3.8/site-packages
2020-09-03T02:56:37.2557870Z ##[error]Process completed with exit code 1.
2020-09-03T02:56:37.4868680Z Post job cleanup.
2020-09-03T02:56:38.3289540Z [command]/usr/local/bin/git version
2020-09-03T02:56:38.3406170Z git version 2.28.0
2020-09-03T02:56:38.3465130Z [command]/usr/local/bin/git config --local --name-only --get-regexp core\.sshCommand
2020-09-03T02:56:38.3561340Z [command]/usr/local/bin/git submodule foreach --recursive git config --local --name-only --get-regexp 'core\.sshCommand' && git config --local --unset-all 'core.sshCommand' || :
2020-09-03T02:56:39.0875020Z [command]/usr/local/bin/git config --local --name-only --get-regexp http\.https\:\/\/github\.com\/\.extraheader
2020-09-03T02:56:39.0946780Z http.https://github.com/.extraheader
2020-09-03T02:56:39.0969160Z [command]/usr/local/bin/git config --local --unset-all http.https://github.com/.extraheader
2020-09-03T02:56:39.3212090Z [command]/usr/local/bin/git submodule foreach --recursive git config --local --name-only --get-regexp 'http\.https\:\/\/github\.com\/\.extraheader' && git config --local --unset-all 'http.https://github.com/.extraheader' || :
2020-09-03T02:56:39.5687830Z Cleaning up orphan processes
2020-09-03T02:56:40.0263370Z Terminate orphan process: pid (2567) (diskimages-help)
|
DMGError
|
def patch_dmgbuild():
if not MACOS:
return
from dmgbuild import core
# will not be required after dmgbuild > v1.3.3
# see https://github.com/al45tair/dmgbuild/pull/18
with open(core.__file__, "r") as f:
src = f.read()
with open(core.__file__, "w") as f:
f.write(
src.replace(
"shutil.rmtree(os.path.join(mount_point, '.Trashes'), True)",
"shutil.rmtree(os.path.join(mount_point, '.Trashes'), True);time.sleep(30)",
)
)
print("patched dmgbuild.core")
|
def patch_dmgbuild():
if not MACOS:
return
from dmgbuild import core
# will not be required after dmgbuild > v1.3.3
# see https://github.com/al45tair/dmgbuild/pull/18
with open(core.__file__, "r") as f:
src = f.read()
if (
"max(total_size / 1024" not in src
and "all_args = ['/usr/bin/hdiutil', cmd]" not in src
):
return
with open(core.__file__, "w") as f:
f.write(
src.replace("max(total_size / 1024", "max(total_size / 1000").replace(
"all_args = ['/usr/bin/hdiutil', cmd]",
"all_args = ['sudo', '/usr/bin/hdiutil', cmd]",
)
)
print("patched dmgbuild.core")
|
https://github.com/napari/napari/issues/1611
|
2020-09-03T02:35:23.0596607Z ##[section]Starting: Request a runner to run this job
2020-09-03T02:35:24.3040765Z Can't find any online and idle self-hosted runner in current repository that matches the required labels: 'macos-latest'
2020-09-03T02:35:24.3040846Z Can't find any online and idle self-hosted runner in current repository's account/organization that matches the required labels: 'macos-latest'
2020-09-03T02:35:24.3041234Z Found online and idle hosted runner in current repository's account/organization that matches the required labels: 'macos-latest'
2020-09-03T02:35:24.5086668Z ##[section]Finishing: Request a runner to run this job
2020-09-03T02:35:34.8844850Z Current runner version: '2.273.0'
2020-09-03T02:35:34.9174660Z ##[group]Operating System
2020-09-03T02:35:34.9175340Z Mac OS X
2020-09-03T02:35:34.9175580Z 10.15.6
2020-09-03T02:35:34.9175740Z 19G2021
2020-09-03T02:35:34.9176060Z ##[endgroup]
2020-09-03T02:35:34.9176290Z ##[group]Virtual Environment
2020-09-03T02:35:34.9176550Z Environment: macos-10.15
2020-09-03T02:35:34.9176780Z Version: 20200829.1
2020-09-03T02:35:34.9177090Z Included Software: https://github.com/actions/virtual-environments/blob/macos-10.15/20200829.1/images/macos/macos-10.15-Readme.md
2020-09-03T02:35:34.9177350Z ##[endgroup]
2020-09-03T02:35:34.9178570Z Prepare workflow directory
2020-09-03T02:35:34.9446350Z Prepare all required actions
2020-09-03T02:35:34.9462860Z Download action repository 'actions/checkout@v2'
2020-09-03T02:35:35.5929130Z Download action repository 'actions/setup-python@v2'
2020-09-03T02:35:35.7881260Z Download action repository 'actions/upload-artifact@v2'
2020-09-03T02:35:35.9641130Z Download action repository 'bruceadams/get-release@v1.2.0'
2020-09-03T02:35:36.2726110Z Download action repository 'actions/upload-release-asset@v1'
2020-09-03T02:35:36.4816540Z ##[group]Run actions/checkout@v2
2020-09-03T02:35:36.4817050Z with:
2020-09-03T02:35:36.4817280Z fetch-depth: 0
2020-09-03T02:35:36.4817490Z repository: napari/napari
2020-09-03T02:35:36.4817980Z token: ***
2020-09-03T02:35:36.4818200Z ssh-strict: true
2020-09-03T02:35:36.4818360Z persist-credentials: true
2020-09-03T02:35:36.4818560Z clean: true
2020-09-03T02:35:36.4818750Z lfs: false
2020-09-03T02:35:36.4818940Z submodules: false
2020-09-03T02:35:36.4819130Z env:
2020-09-03T02:35:36.4819420Z GITHUB_TOKEN: ***
2020-09-03T02:35:36.4819640Z DISPLAY: :99.0
2020-09-03T02:35:36.4819830Z ##[endgroup]
2020-09-03T02:35:37.1854520Z Syncing repository: napari/napari
2020-09-03T02:35:37.1861710Z ##[group]Getting Git version info
2020-09-03T02:35:37.1863040Z Working directory is '/Users/runner/work/napari/napari'
2020-09-03T02:35:37.1863640Z [command]/usr/local/bin/git version
2020-09-03T02:35:37.9417000Z git version 2.28.0
2020-09-03T02:35:37.9419690Z ##[endgroup]
2020-09-03T02:35:37.9420750Z Deleting the contents of '/Users/runner/work/napari/napari'
2020-09-03T02:35:37.9422770Z ##[group]Initializing the repository
2020-09-03T02:35:37.9423050Z [command]/usr/local/bin/git init /Users/runner/work/napari/napari
2020-09-03T02:35:37.9423280Z Initialized empty Git repository in /Users/runner/work/napari/napari/.git/
2020-09-03T02:35:37.9423810Z [command]/usr/local/bin/git remote add origin https://github.com/napari/napari
2020-09-03T02:35:37.9424090Z ##[endgroup]
2020-09-03T02:35:37.9424570Z ##[group]Disabling automatic garbage collection
2020-09-03T02:35:37.9425470Z [command]/usr/local/bin/git config --local gc.auto 0
2020-09-03T02:35:37.9425920Z ##[endgroup]
2020-09-03T02:35:37.9428060Z ##[group]Setting up auth
2020-09-03T02:35:37.9428700Z [command]/usr/local/bin/git config --local --name-only --get-regexp core\.sshCommand
2020-09-03T02:35:37.9429800Z [command]/usr/local/bin/git submodule foreach --recursive git config --local --name-only --get-regexp 'core\.sshCommand' && git config --local --unset-all 'core.sshCommand' || :
2020-09-03T02:35:37.9430550Z [command]/usr/local/bin/git config --local --name-only --get-regexp http\.https\:\/\/github\.com\/\.extraheader
2020-09-03T02:35:37.9431740Z [command]/usr/local/bin/git submodule foreach --recursive git config --local --name-only --get-regexp 'http\.https\:\/\/github\.com\/\.extraheader' && git config --local --unset-all 'http.https://github.com/.extraheader' || :
2020-09-03T02:35:37.9432790Z [command]/usr/local/bin/git config --local http.https://github.com/.extraheader AUTHORIZATION: basic ***
2020-09-03T02:35:37.9433360Z ##[endgroup]
2020-09-03T02:35:37.9433860Z ##[group]Fetching the repository
2020-09-03T02:35:37.9434850Z [command]/usr/local/bin/git -c protocol.version=2 fetch --prune --progress --no-recurse-submodules origin +refs/heads/*:refs/remotes/origin/* +refs/tags/*:refs/tags/*
2020-09-03T02:35:37.9435950Z remote: Enumerating objects: 47, done.
2020-09-03T02:35:37.9436450Z remote: Counting objects: 2% (1/47)
2020-09-03T02:35:37.9436760Z remote: Counting objects: 4% (2/47)
2020-09-03T02:35:37.9437030Z remote: Counting objects: 6% (3/47)
2020-09-03T02:35:37.9437180Z remote: Counting objects: 8% (4/47)
2020-09-03T02:35:37.9437360Z remote: Counting objects: 10% (5/47)
2020-09-03T02:35:37.9437530Z remote: Counting objects: 12% (6/47)
2020-09-03T02:35:37.9438210Z remote: Counting objects: 14% (7/47)
2020-09-03T02:35:37.9438920Z remote: Counting objects: 17% (8/47)
2020-09-03T02:35:37.9440190Z remote: Counting objects: 19% (9/47)
2020-09-03T02:35:37.9440680Z remote: Counting objects: 21% (10/47)
2020-09-03T02:35:37.9441130Z remote: Counting objects: 23% (11/47)
2020-09-03T02:35:37.9441590Z remote: Counting objects: 25% (12/47)
2020-09-03T02:35:37.9442040Z remote: Counting objects: 27% (13/47)
2020-09-03T02:35:37.9452730Z remote: Counting objects: 29% (14/47)
2020-09-03T02:35:37.9453870Z remote: Counting objects: 31% (15/47)
2020-09-03T02:35:37.9454440Z remote: Counting objects: 34% (16/47)
2020-09-03T02:35:37.9454900Z remote: Counting objects: 36% (17/47)
2020-09-03T02:35:37.9456040Z remote: Counting objects: 38% (18/47)
2020-09-03T02:35:37.9456250Z remote: Counting objects: 40% (19/47)
2020-09-03T02:35:37.9456870Z remote: Counting objects: 42% (20/47)
2020-09-03T02:35:37.9457280Z remote: Counting objects: 44% (21/47)
2020-09-03T02:35:37.9457740Z remote: Counting objects: 46% (22/47)
2020-09-03T02:35:37.9458280Z remote: Counting objects: 48% (23/47)
2020-09-03T02:35:37.9458500Z remote: Counting objects: 51% (24/47)
2020-09-03T02:35:37.9458670Z remote: Counting objects: 53% (25/47)
2020-09-03T02:35:37.9458850Z remote: Counting objects: 55% (26/47)
2020-09-03T02:35:37.9459320Z remote: Counting objects: 57% (27/47)
2020-09-03T02:35:37.9460140Z remote: Counting objects: 59% (28/47)
2020-09-03T02:35:37.9460650Z remote: Counting objects: 61% (29/47)
2020-09-03T02:35:37.9461100Z remote: Counting objects: 63% (30/47)
2020-09-03T02:35:37.9461540Z remote: Counting objects: 65% (31/47)
2020-09-03T02:35:37.9461990Z remote: Counting objects: 68% (32/47)
2020-09-03T02:35:37.9462440Z remote: Counting objects: 70% (33/47)
2020-09-03T02:35:37.9462800Z remote: Counting objects: 72% (34/47)
2020-09-03T02:35:37.9463530Z remote: Counting objects: 74% (35/47)
2020-09-03T02:35:37.9463760Z remote: Counting objects: 76% (36/47)
2020-09-03T02:35:37.9463940Z remote: Counting objects: 78% (37/47)
2020-09-03T02:35:37.9464120Z remote: Counting objects: 80% (38/47)
2020-09-03T02:35:37.9464300Z remote: Counting objects: 82% (39/47)
2020-09-03T02:35:37.9464470Z remote: Counting objects: 85% (40/47)
2020-09-03T02:35:37.9464680Z remote: Counting objects: 87% (41/47)
2020-09-03T02:35:37.9465120Z remote: Counting objects: 89% (42/47)
2020-09-03T02:35:37.9465420Z remote: Counting objects: 91% (43/47)
2020-09-03T02:35:37.9465570Z remote: Counting objects: 93% (44/47)
2020-09-03T02:35:37.9465990Z remote: Counting objects: 95% (45/47)
2020-09-03T02:35:37.9467000Z remote: Counting objects: 97% (46/47)
2020-09-03T02:35:37.9467520Z remote: Counting objects: 100% (47/47)
2020-09-03T02:35:37.9467970Z remote: Counting objects: 100% (47/47), done.
2020-09-03T02:35:37.9468270Z remote: Compressing objects: 2% (1/43)
2020-09-03T02:35:37.9468610Z remote: Compressing objects: 4% (2/43)
2020-09-03T02:35:37.9468880Z remote: Compressing objects: 6% (3/43)
2020-09-03T02:35:37.9469160Z remote: Compressing objects: 9% (4/43)
2020-09-03T02:35:37.9469510Z remote: Compressing objects: 11% (5/43)
2020-09-03T02:35:37.9470600Z remote: Compressing objects: 13% (6/43)
2020-09-03T02:35:37.9471100Z remote: Compressing objects: 16% (7/43)
2020-09-03T02:35:37.9471390Z remote: Compressing objects: 18% (8/43)
2020-09-03T02:35:37.9471850Z remote: Compressing objects: 20% (9/43)
2020-09-03T02:35:37.9472140Z remote: Compressing objects: 23% (10/43)
2020-09-03T02:35:37.9472340Z remote: Compressing objects: 25% (11/43)
2020-09-03T02:35:37.9472760Z remote: Compressing objects: 27% (12/43)
2020-09-03T02:35:37.9473050Z remote: Compressing objects: 30% (13/43)
2020-09-03T02:35:37.9473490Z remote: Compressing objects: 32% (14/43)
2020-09-03T02:35:37.9473940Z remote: Compressing objects: 34% (15/43)
2020-09-03T02:35:37.9474380Z remote: Compressing objects: 37% (16/43)
2020-09-03T02:35:37.9474820Z remote: Compressing objects: 39% (17/43)
2020-09-03T02:35:37.9475100Z remote: Compressing objects: 41% (18/43)
2020-09-03T02:35:37.9475380Z remote: Compressing objects: 44% (19/43)
2020-09-03T02:35:37.9475960Z remote: Compressing objects: 46% (20/43)
2020-09-03T02:35:37.9477090Z remote: Compressing objects: 48% (21/43)
2020-09-03T02:35:37.9477290Z remote: Compressing objects: 51% (22/43)
2020-09-03T02:35:37.9477430Z remote: Compressing objects: 53% (23/43)
2020-09-03T02:35:37.9477610Z remote: Compressing objects: 55% (24/43)
2020-09-03T02:35:37.9477790Z remote: Compressing objects: 58% (25/43)
2020-09-03T02:35:37.9477960Z remote: Compressing objects: 60% (26/43)
2020-09-03T02:35:37.9478140Z remote: Compressing objects: 62% (27/43)
2020-09-03T02:35:37.9478320Z remote: Compressing objects: 65% (28/43)
2020-09-03T02:35:37.9478490Z remote: Compressing objects: 67% (29/43)
2020-09-03T02:35:37.9478680Z remote: Compressing objects: 69% (30/43)
2020-09-03T02:35:37.9479020Z remote: Compressing objects: 72% (31/43)
2020-09-03T02:35:37.9479230Z remote: Compressing objects: 74% (32/43)
2020-09-03T02:35:37.9479420Z remote: Compressing objects: 76% (33/43)
2020-09-03T02:35:37.9479560Z remote: Compressing objects: 79% (34/43)
2020-09-03T02:35:37.9479740Z remote: Compressing objects: 81% (35/43)
2020-09-03T02:35:37.9479920Z remote: Compressing objects: 83% (36/43)
2020-09-03T02:35:37.9480090Z remote: Compressing objects: 86% (37/43)
2020-09-03T02:35:37.9480280Z remote: Compressing objects: 88% (38/43)
2020-09-03T02:35:37.9480460Z remote: Compressing objects: 90% (39/43)
2020-09-03T02:35:37.9480670Z remote: Compressing objects: 93% (40/43)
2020-09-03T02:35:37.9481060Z remote: Compressing objects: 95% (41/43)
2020-09-03T02:35:37.9481230Z remote: Compressing objects: 97% (42/43)
2020-09-03T02:35:37.9481410Z remote: Compressing objects: 100% (43/43)
2020-09-03T02:35:37.9481590Z remote: Compressing objects: 100% (43/43), done.
2020-09-03T02:35:37.9481810Z Receiving objects: 0% (1/10717)
2020-09-03T02:35:37.9482000Z Receiving objects: 1% (108/10717)
2020-09-03T02:35:37.9482190Z Receiving objects: 2% (215/10717)
2020-09-03T02:35:37.9482370Z Receiving objects: 3% (322/10717)
2020-09-03T02:35:37.9482540Z Receiving objects: 4% (429/10717)
2020-09-03T02:35:37.9482710Z Receiving objects: 5% (536/10717)
2020-09-03T02:35:37.9482850Z Receiving objects: 6% (644/10717)
2020-09-03T02:35:37.9483020Z Receiving objects: 7% (751/10717)
2020-09-03T02:35:37.9483190Z Receiving objects: 8% (858/10717)
2020-09-03T02:35:37.9581050Z Receiving objects: 9% (965/10717)
2020-09-03T02:35:38.4608170Z Receiving objects: 10% (1072/10717)
2020-09-03T02:35:38.4613280Z Receiving objects: 11% (1179/10717)
2020-09-03T02:35:38.4613620Z Receiving objects: 12% (1287/10717)
2020-09-03T02:35:38.4613900Z Receiving objects: 13% (1394/10717)
2020-09-03T02:35:38.4614510Z Receiving objects: 14% (1501/10717)
2020-09-03T02:35:38.4615050Z Receiving objects: 15% (1608/10717)
2020-09-03T02:35:38.4616020Z Receiving objects: 16% (1715/10717)
2020-09-03T02:35:38.4616940Z Receiving objects: 17% (1822/10717)
2020-09-03T02:35:38.4617760Z Receiving objects: 18% (1930/10717)
2020-09-03T02:35:38.4618780Z Receiving objects: 19% (2037/10717)
2020-09-03T02:35:38.4619670Z Receiving objects: 20% (2144/10717)
2020-09-03T02:35:38.4620580Z Receiving objects: 21% (2251/10717)
2020-09-03T02:35:38.4620950Z Receiving objects: 22% (2358/10717)
2020-09-03T02:35:38.4621770Z Receiving objects: 23% (2465/10717)
2020-09-03T02:35:38.4622230Z Receiving objects: 24% (2573/10717)
2020-09-03T02:35:38.4622870Z Receiving objects: 25% (2680/10717)
2020-09-03T02:35:38.4623340Z Receiving objects: 26% (2787/10717)
2020-09-03T02:35:38.4624220Z Receiving objects: 27% (2894/10717)
2020-09-03T02:35:38.4625050Z Receiving objects: 28% (3001/10717)
2020-09-03T02:35:38.4625270Z Receiving objects: 29% (3108/10717)
2020-09-03T02:35:38.4625460Z Receiving objects: 30% (3216/10717)
2020-09-03T02:35:38.4626090Z Receiving objects: 31% (3323/10717)
2020-09-03T02:35:38.4627050Z Receiving objects: 32% (3430/10717)
2020-09-03T02:35:38.4627530Z Receiving objects: 33% (3537/10717)
2020-09-03T02:35:38.4628030Z Receiving objects: 34% (3644/10717)
2020-09-03T02:35:38.4628810Z Receiving objects: 35% (3751/10717)
2020-09-03T02:35:38.4629540Z Receiving objects: 36% (3859/10717)
2020-09-03T02:35:38.4630300Z Receiving objects: 37% (3966/10717)
2020-09-03T02:35:38.4631060Z Receiving objects: 38% (4073/10717)
2020-09-03T02:35:38.4631840Z Receiving objects: 39% (4180/10717)
2020-09-03T02:35:38.4632880Z Receiving objects: 40% (4287/10717)
2020-09-03T02:35:38.4633290Z Receiving objects: 41% (4394/10717)
2020-09-03T02:35:38.4633580Z Receiving objects: 42% (4502/10717)
2020-09-03T02:35:38.4633990Z Receiving objects: 43% (4609/10717)
2020-09-03T02:35:38.4634340Z Receiving objects: 44% (4716/10717)
2020-09-03T02:35:38.4635330Z Receiving objects: 45% (4823/10717)
2020-09-03T02:35:38.4635740Z Receiving objects: 46% (4930/10717)
2020-09-03T02:35:38.4636190Z Receiving objects: 47% (5037/10717)
2020-09-03T02:35:38.4636610Z Receiving objects: 48% (5145/10717)
2020-09-03T02:35:38.4637030Z Receiving objects: 49% (5252/10717)
2020-09-03T02:35:38.4637370Z Receiving objects: 50% (5359/10717)
2020-09-03T02:35:38.4638250Z Receiving objects: 51% (5466/10717)
2020-09-03T02:35:38.4639060Z Receiving objects: 52% (5573/10717)
2020-09-03T02:35:38.4639900Z Receiving objects: 53% (5681/10717)
2020-09-03T02:35:38.4640510Z Receiving objects: 54% (5788/10717)
2020-09-03T02:35:38.4640970Z Receiving objects: 55% (5895/10717)
2020-09-03T02:35:38.4641700Z Receiving objects: 56% (6002/10717)
2020-09-03T02:35:38.4642050Z Receiving objects: 57% (6109/10717)
2020-09-03T02:35:38.4642750Z Receiving objects: 58% (6216/10717)
2020-09-03T02:35:38.4643260Z Receiving objects: 59% (6324/10717)
2020-09-03T02:35:38.4643750Z Receiving objects: 60% (6431/10717)
2020-09-03T02:35:38.4644150Z Receiving objects: 61% (6538/10717)
2020-09-03T02:35:38.4644490Z Receiving objects: 62% (6645/10717)
2020-09-03T02:35:38.4644900Z Receiving objects: 63% (6752/10717)
2020-09-03T02:35:38.4645210Z Receiving objects: 64% (6859/10717)
2020-09-03T02:35:38.4645860Z Receiving objects: 65% (6967/10717)
2020-09-03T02:35:38.4646070Z Receiving objects: 66% (7074/10717)
2020-09-03T02:35:38.4646240Z Receiving objects: 67% (7181/10717)
2020-09-03T02:35:38.4646430Z Receiving objects: 68% (7288/10717)
2020-09-03T02:35:38.4646620Z Receiving objects: 69% (7395/10717)
2020-09-03T02:35:38.4646800Z Receiving objects: 70% (7502/10717)
2020-09-03T02:35:38.4646940Z Receiving objects: 71% (7610/10717)
2020-09-03T02:35:38.4647120Z Receiving objects: 72% (7717/10717)
2020-09-03T02:35:38.4647290Z Receiving objects: 73% (7824/10717)
2020-09-03T02:35:38.4647470Z Receiving objects: 74% (7931/10717)
2020-09-03T02:35:38.4647640Z Receiving objects: 75% (8038/10717)
2020-09-03T02:35:38.4647820Z Receiving objects: 76% (8145/10717)
2020-09-03T02:35:38.4648000Z Receiving objects: 77% (8253/10717)
2020-09-03T02:35:38.4648910Z Receiving objects: 78% (8360/10717)
2020-09-03T02:35:38.4649090Z Receiving objects: 79% (8467/10717)
2020-09-03T02:35:38.4649490Z Receiving objects: 80% (8574/10717)
2020-09-03T02:35:38.4649960Z Receiving objects: 81% (8681/10717)
2020-09-03T02:35:38.4650480Z Receiving objects: 82% (8788/10717)
2020-09-03T02:35:38.4650920Z Receiving objects: 83% (8896/10717)
2020-09-03T02:35:38.4651940Z Receiving objects: 84% (9003/10717)
2020-09-03T02:35:38.4652500Z Receiving objects: 85% (9110/10717)
2020-09-03T02:35:38.4653110Z Receiving objects: 86% (9217/10717)
2020-09-03T02:35:38.4653640Z Receiving objects: 87% (9324/10717)
2020-09-03T02:35:38.4654060Z Receiving objects: 88% (9431/10717)
2020-09-03T02:35:38.4654660Z Receiving objects: 89% (9539/10717)
2020-09-03T02:35:38.4655360Z Receiving objects: 90% (9646/10717)
2020-09-03T02:35:38.4656060Z Receiving objects: 91% (9753/10717)
2020-09-03T02:35:38.4657170Z Receiving objects: 92% (9860/10717)
2020-09-03T02:35:38.4658040Z Receiving objects: 93% (9967/10717)
2020-09-03T02:35:38.4658710Z Receiving objects: 94% (10074/10717)
2020-09-03T02:35:38.4659350Z Receiving objects: 95% (10182/10717)
2020-09-03T02:35:38.4659950Z Receiving objects: 96% (10289/10717)
2020-09-03T02:35:38.4660460Z Receiving objects: 97% (10396/10717)
2020-09-03T02:35:38.4661810Z remote: Total 10717 (delta 16), reused 12 (delta 4), pack-reused 10670
2020-09-03T02:35:38.4662390Z Receiving objects: 98% (10503/10717)
2020-09-03T02:35:38.4662900Z Receiving objects: 99% (10610/10717)
2020-09-03T02:35:38.4663390Z Receiving objects: 100% (10717/10717)
2020-09-03T02:35:38.4663830Z Receiving objects: 100% (10717/10717), 19.50 MiB | 52.68 MiB/s, done.
2020-09-03T02:35:38.4664540Z Resolving deltas: 0% (0/7480)
2020-09-03T02:35:38.4665340Z Resolving deltas: 1% (86/7480)
2020-09-03T02:35:38.4666210Z Resolving deltas: 2% (150/7480)
2020-09-03T02:35:38.4666690Z Resolving deltas: 3% (226/7480)
2020-09-03T02:35:38.4667480Z Resolving deltas: 4% (303/7480)
2020-09-03T02:35:38.4667740Z Resolving deltas: 5% (374/7480)
2020-09-03T02:35:38.4667950Z Resolving deltas: 6% (458/7480)
2020-09-03T02:35:38.4668640Z Resolving deltas: 7% (527/7480)
2020-09-03T02:35:38.4669910Z Resolving deltas: 8% (602/7480)
2020-09-03T02:35:38.4670240Z Resolving deltas: 9% (681/7480)
2020-09-03T02:35:38.4671200Z Resolving deltas: 10% (751/7480)
2020-09-03T02:35:38.4671950Z Resolving deltas: 11% (825/7480)
2020-09-03T02:35:38.4672400Z Resolving deltas: 12% (899/7480)
2020-09-03T02:35:38.4673290Z Resolving deltas: 13% (976/7480)
2020-09-03T02:35:38.4674090Z Resolving deltas: 14% (1055/7480)
2020-09-03T02:35:38.4674260Z Resolving deltas: 15% (1122/7480)
2020-09-03T02:35:38.4674750Z Resolving deltas: 16% (1246/7480)
2020-09-03T02:35:38.4675370Z Resolving deltas: 17% (1275/7480)
2020-09-03T02:35:38.4675860Z Resolving deltas: 18% (1352/7480)
2020-09-03T02:35:38.4676630Z Resolving deltas: 19% (1440/7480)
2020-09-03T02:35:38.4677130Z Resolving deltas: 20% (1496/7480)
2020-09-03T02:35:38.4677650Z Resolving deltas: 21% (1571/7480)
2020-09-03T02:35:38.4678160Z Resolving deltas: 22% (1668/7480)
2020-09-03T02:35:38.4678380Z Resolving deltas: 23% (1726/7480)
2020-09-03T02:35:38.4678680Z Resolving deltas: 24% (1797/7480)
2020-09-03T02:35:38.4678830Z Resolving deltas: 25% (1879/7480)
2020-09-03T02:35:38.4679320Z Resolving deltas: 26% (1946/7480)
2020-09-03T02:35:38.4679810Z Resolving deltas: 27% (2023/7480)
2020-09-03T02:35:38.4680290Z Resolving deltas: 28% (2109/7480)
2020-09-03T02:35:38.4680770Z Resolving deltas: 29% (2181/7480)
2020-09-03T02:35:38.4681100Z Resolving deltas: 30% (2256/7480)
2020-09-03T02:35:38.4681560Z Resolving deltas: 31% (2319/7480)
2020-09-03T02:35:38.4682050Z Resolving deltas: 32% (2407/7480)
2020-09-03T02:35:38.4682660Z Resolving deltas: 33% (2470/7480)
2020-09-03T02:35:38.4682880Z Resolving deltas: 34% (2556/7480)
2020-09-03T02:35:38.4683080Z Resolving deltas: 35% (2626/7480)
2020-09-03T02:35:38.4683220Z Resolving deltas: 36% (2710/7480)
2020-09-03T02:35:38.4683810Z Resolving deltas: 37% (2783/7480)
2020-09-03T02:35:38.4684040Z Resolving deltas: 38% (2867/7480)
2020-09-03T02:35:38.4684250Z Resolving deltas: 39% (2918/7480)
2020-09-03T02:35:38.4684440Z Resolving deltas: 40% (2997/7480)
2020-09-03T02:35:38.4684630Z Resolving deltas: 41% (3067/7480)
2020-09-03T02:35:38.4684910Z Resolving deltas: 42% (3154/7480)
2020-09-03T02:35:38.4685130Z Resolving deltas: 43% (3219/7480)
2020-09-03T02:35:38.4685650Z Resolving deltas: 44% (3301/7480)
2020-09-03T02:35:38.4686450Z Resolving deltas: 45% (3386/7480)
2020-09-03T02:35:38.4687170Z Resolving deltas: 46% (3446/7480)
2020-09-03T02:35:38.4688160Z Resolving deltas: 47% (3521/7480)
2020-09-03T02:35:38.4689440Z Resolving deltas: 48% (3591/7480)
2020-09-03T02:35:38.4689690Z Resolving deltas: 49% (3666/7480)
2020-09-03T02:35:38.4690020Z Resolving deltas: 50% (3740/7480)
2020-09-03T02:35:38.4690220Z Resolving deltas: 51% (3816/7480)
2020-09-03T02:35:38.4690420Z Resolving deltas: 52% (3894/7480)
2020-09-03T02:35:38.4690650Z Resolving deltas: 53% (3967/7480)
2020-09-03T02:35:38.4691150Z Resolving deltas: 54% (4045/7480)
2020-09-03T02:35:38.4691460Z Resolving deltas: 55% (4121/7480)
2020-09-03T02:35:38.4691850Z Resolving deltas: 56% (4194/7480)
2020-09-03T02:35:38.4692190Z Resolving deltas: 57% (4277/7480)
2020-09-03T02:35:38.4692490Z Resolving deltas: 58% (4366/7480)
2020-09-03T02:35:38.4693000Z Resolving deltas: 59% (4414/7480)
2020-09-03T02:35:38.4693290Z Resolving deltas: 60% (4533/7480)
2020-09-03T02:35:38.4693550Z Resolving deltas: 61% (4564/7480)
2020-09-03T02:35:38.4693700Z Resolving deltas: 62% (4644/7480)
2020-09-03T02:35:38.4693890Z Resolving deltas: 63% (4716/7480)
2020-09-03T02:35:38.4694090Z Resolving deltas: 64% (4797/7480)
2020-09-03T02:35:38.4694350Z Resolving deltas: 65% (4876/7480)
2020-09-03T02:35:38.4812940Z Resolving deltas: 66% (4939/7480)
2020-09-03T02:35:38.4813570Z Resolving deltas: 67% (5019/7480)
2020-09-03T02:35:38.4814640Z Resolving deltas: 68% (5087/7480)
2020-09-03T02:35:38.4815340Z Resolving deltas: 69% (5168/7480)
2020-09-03T02:35:38.4815970Z Resolving deltas: 70% (5236/7480)
2020-09-03T02:35:38.4816580Z Resolving deltas: 71% (5322/7480)
2020-09-03T02:35:38.4817180Z Resolving deltas: 72% (5386/7480)
2020-09-03T02:35:38.4817970Z Resolving deltas: 73% (5474/7480)
2020-09-03T02:35:38.4818840Z Resolving deltas: 74% (5542/7480)
2020-09-03T02:35:38.4819240Z Resolving deltas: 75% (5623/7480)
2020-09-03T02:35:38.4820060Z Resolving deltas: 76% (5685/7480)
2020-09-03T02:35:38.4820460Z Resolving deltas: 77% (5761/7480)
2020-09-03T02:35:38.4820770Z Resolving deltas: 78% (5836/7480)
2020-09-03T02:35:38.4821670Z Resolving deltas: 79% (5912/7480)
2020-09-03T02:35:38.4822110Z Resolving deltas: 80% (5993/7480)
2020-09-03T02:35:38.4822570Z Resolving deltas: 81% (6059/7480)
2020-09-03T02:35:38.4823020Z Resolving deltas: 82% (6137/7480)
2020-09-03T02:35:38.4823450Z Resolving deltas: 83% (6215/7480)
2020-09-03T02:35:38.4823880Z Resolving deltas: 84% (6291/7480)
2020-09-03T02:35:38.4824340Z Resolving deltas: 85% (6359/7480)
2020-09-03T02:35:38.4824770Z Resolving deltas: 86% (6442/7480)
2020-09-03T02:35:38.4825100Z Resolving deltas: 87% (6508/7480)
2020-09-03T02:35:38.4825530Z Resolving deltas: 88% (6591/7480)
2020-09-03T02:35:38.4825940Z Resolving deltas: 89% (6658/7480)
2020-09-03T02:35:38.4826370Z Resolving deltas: 90% (6736/7480)
2020-09-03T02:35:38.4826770Z Resolving deltas: 91% (6841/7480)
2020-09-03T02:35:38.4827180Z Resolving deltas: 92% (6886/7480)
2020-09-03T02:35:38.4827490Z Resolving deltas: 93% (6963/7480)
2020-09-03T02:35:38.4827700Z Resolving deltas: 94% (7062/7480)
2020-09-03T02:35:38.4827980Z Resolving deltas: 95% (7112/7480)
2020-09-03T02:35:38.4828320Z Resolving deltas: 96% (7188/7480)
2020-09-03T02:35:38.4828650Z Resolving deltas: 97% (7256/7480)
2020-09-03T02:35:38.4828930Z Resolving deltas: 98% (7343/7480)
2020-09-03T02:35:38.4829810Z Resolving deltas: 99% (7412/7480)
2020-09-03T02:35:38.4830190Z Resolving deltas: 100% (7480/7480)
2020-09-03T02:35:38.4831140Z Resolving deltas: 100% (7480/7480), done.
2020-09-03T02:35:38.4831460Z From https://github.com/napari/napari
2020-09-03T02:35:38.4833240Z * [new branch] master -> origin/master
2020-09-03T02:35:38.4834240Z * [new tag] v0.0.1 -> v0.0.1
2020-09-03T02:35:38.4835570Z * [new tag] v0.0.2 -> v0.0.2
2020-09-03T02:35:38.4836820Z * [new tag] v0.0.3.1 -> v0.0.3.1
2020-09-03T02:35:38.4838110Z * [new tag] v0.0.4 -> v0.0.4
2020-09-03T02:35:38.4839410Z * [new tag] v0.0.5 -> v0.0.5
2020-09-03T02:35:38.4840660Z * [new tag] v0.0.5.1 -> v0.0.5.1
2020-09-03T02:35:38.4841890Z * [new tag] v0.0.6 -> v0.0.6
2020-09-03T02:35:38.4843100Z * [new tag] v0.0.7 -> v0.0.7
2020-09-03T02:35:38.4844320Z * [new tag] v0.0.8 -> v0.0.8
2020-09-03T02:35:38.4845530Z * [new tag] v0.0.9 -> v0.0.9
2020-09-03T02:35:38.4846760Z * [new tag] v0.1.0 -> v0.1.0
2020-09-03T02:35:38.4848070Z * [new tag] v0.1.1 -> v0.1.1
2020-09-03T02:35:38.4849910Z * [new tag] v0.1.2 -> v0.1.2
2020-09-03T02:35:38.4851410Z * [new tag] v0.1.3 -> v0.1.3
2020-09-03T02:35:38.4852910Z * [new tag] v0.1.4 -> v0.1.4
2020-09-03T02:35:38.4854000Z * [new tag] v0.1.5 -> v0.1.5
2020-09-03T02:35:38.4855970Z * [new tag] v0.2.0 -> v0.2.0
2020-09-03T02:35:38.4857010Z * [new tag] v0.2.1 -> v0.2.1
2020-09-03T02:35:38.4857860Z * [new tag] v0.2.10 -> v0.2.10
2020-09-03T02:35:38.4858590Z * [new tag] v0.2.10rc0 -> v0.2.10rc0
2020-09-03T02:35:38.4859820Z * [new tag] v0.2.11 -> v0.2.11
2020-09-03T02:35:38.4860860Z * [new tag] v0.2.11rc0 -> v0.2.11rc0
2020-09-03T02:35:38.4861760Z * [new tag] v0.2.12 -> v0.2.12
2020-09-03T02:35:38.4862670Z * [new tag] v0.2.12rc0 -> v0.2.12rc0
2020-09-03T02:35:38.4864540Z * [new tag] v0.2.12rc1 -> v0.2.12rc1
2020-09-03T02:35:38.4866050Z * [new tag] v0.2.12rc2 -> v0.2.12rc2
2020-09-03T02:35:38.4867770Z * [new tag] v0.2.12rc3 -> v0.2.12rc3
2020-09-03T02:35:38.4868930Z * [new tag] v0.2.12rc4 -> v0.2.12rc4
2020-09-03T02:35:38.4870080Z * [new tag] v0.2.2 -> v0.2.2
2020-09-03T02:35:38.4871720Z * [new tag] v0.2.3 -> v0.2.3
2020-09-03T02:35:38.4872820Z * [new tag] v0.2.4 -> v0.2.4
2020-09-03T02:35:38.4873860Z * [new tag] v0.2.4rc1 -> v0.2.4rc1
2020-09-03T02:35:38.4875390Z * [new tag] v0.2.5 -> v0.2.5
2020-09-03T02:35:38.4876940Z * [new tag] v0.2.5rc1 -> v0.2.5rc1
2020-09-03T02:35:38.4878440Z * [new tag] v0.2.6 -> v0.2.6
2020-09-03T02:35:38.4879620Z * [new tag] v0.2.6rc1 -> v0.2.6rc1
2020-09-03T02:35:38.4880760Z * [new tag] v0.2.7 -> v0.2.7
2020-09-03T02:35:38.4882430Z * [new tag] v0.2.7rc1 -> v0.2.7rc1
2020-09-03T02:35:38.4883530Z * [new tag] v0.2.7rc2 -> v0.2.7rc2
2020-09-03T02:35:38.4884580Z * [new tag] v0.2.8 -> v0.2.8
2020-09-03T02:35:38.4885740Z * [new tag] v0.2.8rc1 -> v0.2.8rc1
2020-09-03T02:35:38.4886860Z * [new tag] v0.2.9 -> v0.2.9
2020-09-03T02:35:38.4888410Z * [new tag] v0.2.9rc1 -> v0.2.9rc1
2020-09-03T02:35:38.4889560Z * [new tag] v0.3.0 -> v0.3.0
2020-09-03T02:35:38.4891240Z * [new tag] v0.3.0rc0 -> v0.3.0rc0
2020-09-03T02:35:38.4892330Z * [new tag] v0.3.0rc1 -> v0.3.0rc1
2020-09-03T02:35:38.4893450Z * [new tag] v0.3.1 -> v0.3.1
2020-09-03T02:35:38.4894490Z * [new tag] v0.3.1rc0 -> v0.3.1rc0
2020-09-03T02:35:38.4895710Z * [new tag] v0.3.2 -> v0.3.2
2020-09-03T02:35:38.4896900Z * [new tag] v0.3.2rc0 -> v0.3.2rc0
2020-09-03T02:35:38.4898040Z * [new tag] v0.3.2rc1 -> v0.3.2rc1
2020-09-03T02:35:38.4899210Z * [new tag] v0.3.2rc2 -> v0.3.2rc2
2020-09-03T02:35:38.4900440Z * [new tag] v0.3.2rc3 -> v0.3.2rc3
2020-09-03T02:35:38.4901580Z * [new tag] v0.3.3 -> v0.3.3
2020-09-03T02:35:38.4903400Z * [new tag] v0.3.3rc0 -> v0.3.3rc0
2020-09-03T02:35:38.4904550Z * [new tag] v0.3.4 -> v0.3.4
2020-09-03T02:35:38.4905700Z * [new tag] v0.3.4rc0 -> v0.3.4rc0
2020-09-03T02:35:38.4906840Z * [new tag] v0.3.5 -> v0.3.5
2020-09-03T02:35:38.4907970Z * [new tag] v0.3.5rc0 -> v0.3.5rc0
2020-09-03T02:35:38.4909110Z * [new tag] v0.3.5rc1 -> v0.3.5rc1
2020-09-03T02:35:38.4910330Z * [new tag] v0.3.6 -> v0.3.6
2020-09-03T02:35:38.4911450Z * [new tag] v0.3.6rc0 -> v0.3.6rc0
2020-09-03T02:35:38.4912720Z * [new tag] v0.3.6rc1 -> v0.3.6rc1
2020-09-03T02:35:38.4914590Z * [new tag] v0.3.6rc2 -> v0.3.6rc2
2020-09-03T02:35:38.4915850Z * [new tag] v0.3.7rc0 -> v0.3.7rc0
2020-09-03T02:35:38.4917010Z * [new tag] v0.3.7rc1 -> v0.3.7rc1
2020-09-03T02:35:38.4918150Z * [new tag] v0.3.7rc2 -> v0.3.7rc2
2020-09-03T02:35:38.4919680Z * [new tag] v0.3.7rc3 -> v0.3.7rc3
2020-09-03T02:35:38.4930520Z [command]/usr/local/bin/git tag --list v0.3.7rc3
2020-09-03T02:35:38.5025190Z v0.3.7rc3
2020-09-03T02:35:38.5037710Z [command]/usr/local/bin/git rev-parse refs/tags/v0.3.7rc3
2020-09-03T02:35:38.5097480Z ea0fc9ca29d730808b1a66a8710679ee0fbe7ab5
2020-09-03T02:35:38.5103990Z ##[endgroup]
2020-09-03T02:35:38.5104580Z ##[group]Determining the checkout info
2020-09-03T02:35:38.5107420Z ##[endgroup]
2020-09-03T02:35:38.5107950Z ##[group]Checking out the ref
2020-09-03T02:35:38.5114450Z [command]/usr/local/bin/git checkout --progress --force refs/tags/v0.3.7rc3
2020-09-03T02:35:38.6599120Z HEAD is now at ea0fc9c Skip bundle test when setup.cfg is missing (#1608)
2020-09-03T02:35:38.6615110Z ##[endgroup]
2020-09-03T02:35:38.6622620Z [command]/usr/local/bin/git log -1
2020-09-03T02:35:38.6728870Z commit ea0fc9ca29d730808b1a66a8710679ee0fbe7ab5
2020-09-03T02:35:38.6731460Z Author: Juan Nunez-Iglesias <juan.nunez-iglesias@monash.edu>
2020-09-03T02:35:38.6733120Z Date: Thu Sep 3 12:34:39 2020 +1000
2020-09-03T02:35:38.6733820Z
2020-09-03T02:35:38.6734690Z Skip bundle test when setup.cfg is missing (#1608)
2020-09-03T02:35:38.6735460Z
2020-09-03T02:35:38.6736200Z * Skip bundle test in bdist
2020-09-03T02:35:38.6739750Z
2020-09-03T02:35:38.6743280Z * Don't remove test_bundle in sdist for consistency with bdist
2020-09-03T02:35:38.6743950Z
2020-09-03T02:35:38.6749860Z * Update release notes
2020-09-03T02:35:38.6751410Z
2020-09-03T02:35:38.6753010Z * Remove inaccurate module docstring in test_bundle
2020-09-03T02:35:38.6885830Z ##[group]Run actions/setup-python@v2
2020-09-03T02:35:38.6886120Z with:
2020-09-03T02:35:38.6886240Z python-version: 3.8
2020-09-03T02:35:38.6886580Z token: ***
2020-09-03T02:35:38.6886720Z env:
2020-09-03T02:35:38.6886980Z GITHUB_TOKEN: ***
2020-09-03T02:35:38.6887160Z DISPLAY: :99.0
2020-09-03T02:35:38.6887310Z ##[endgroup]
2020-09-03T02:35:38.7740470Z Successfully setup CPython (3.8.5)
2020-09-03T02:35:38.7831450Z ##[group]Run python -m pip install --upgrade pip
2020-09-03T02:35:38.7832040Z �[36;1mpython -m pip install --upgrade pip �[0m
2020-09-03T02:35:38.7832410Z �[36;1mpython -m pip install briefcase==0.3.1 tomlkit wheel�[0m
2020-09-03T02:35:38.7832710Z �[36;1mpython -m pip install -e .[pyside2]�[0m
2020-09-03T02:35:38.8073350Z shell: /bin/bash -e {0}
2020-09-03T02:35:38.8073570Z env:
2020-09-03T02:35:38.8074060Z GITHUB_TOKEN: ***
2020-09-03T02:35:38.8074250Z DISPLAY: :99.0
2020-09-03T02:35:38.8074390Z pythonLocation: /Users/runner/hostedtoolcache/Python/3.8.5/x64
2020-09-03T02:35:38.8074560Z ##[endgroup]
2020-09-03T02:35:41.0869480Z Requirement already up-to-date: pip in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (20.2.2)
2020-09-03T02:35:41.5928780Z Collecting briefcase==0.3.1
2020-09-03T02:35:41.6138770Z Downloading briefcase-0.3.1-py2.py3-none-any.whl (77 kB)
2020-09-03T02:35:41.7985320Z Collecting tomlkit
2020-09-03T02:35:41.8052800Z Downloading tomlkit-0.7.0-py2.py3-none-any.whl (32 kB)
2020-09-03T02:35:42.0119920Z Collecting wheel
2020-09-03T02:35:42.0173290Z Downloading wheel-0.35.1-py2.py3-none-any.whl (33 kB)
2020-09-03T02:35:42.1233540Z Collecting requests>=2.22.0
2020-09-03T02:35:42.1285950Z Downloading requests-2.24.0-py2.py3-none-any.whl (61 kB)
2020-09-03T02:35:42.1657480Z Collecting dmgbuild>=1.3.3; sys_platform == "darwin"
2020-09-03T02:35:42.1726680Z Downloading dmgbuild-1.3.3.tar.gz (35 kB)
2020-09-03T02:35:43.1080990Z Requirement already satisfied: setuptools>=45 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from briefcase==0.3.1) (47.1.0)
2020-09-03T02:35:43.2251660Z Collecting GitPython>=3.0.8
2020-09-03T02:35:43.2300760Z Downloading GitPython-3.1.7-py3-none-any.whl (158 kB)
2020-09-03T02:35:43.3256020Z Collecting Jinja2<3.0
2020-09-03T02:35:43.3305500Z Downloading Jinja2-2.11.2-py2.py3-none-any.whl (125 kB)
2020-09-03T02:35:43.3658170Z Collecting toml>=0.10.0
2020-09-03T02:35:43.3705810Z Downloading toml-0.10.1-py2.py3-none-any.whl (19 kB)
2020-09-03T02:35:43.3772210Z Requirement already satisfied: pip>=20 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from briefcase==0.3.1) (20.2.2)
2020-09-03T02:35:43.4486540Z Collecting cookiecutter>=1.0
2020-09-03T02:35:43.4536090Z Downloading cookiecutter-1.7.2-py2.py3-none-any.whl (34 kB)
2020-09-03T02:35:43.5961400Z Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
2020-09-03T02:35:43.6014240Z Downloading urllib3-1.25.10-py2.py3-none-any.whl (127 kB)
2020-09-03T02:35:43.7132810Z Collecting certifi>=2017.4.17
2020-09-03T02:35:43.7184670Z Downloading certifi-2020.6.20-py2.py3-none-any.whl (156 kB)
2020-09-03T02:35:43.7781270Z Collecting idna<3,>=2.5
2020-09-03T02:35:43.7831140Z Downloading idna-2.10-py2.py3-none-any.whl (58 kB)
2020-09-03T02:35:43.8209240Z Collecting chardet<4,>=3.0.2
2020-09-03T02:35:43.8267860Z Downloading chardet-3.0.4-py2.py3-none-any.whl (133 kB)
2020-09-03T02:35:43.8493490Z Collecting ds_store>=1.1.0
2020-09-03T02:35:43.8549370Z Downloading ds_store-1.1.2.tar.gz (13 kB)
2020-09-03T02:35:44.1835290Z Collecting mac_alias>=2.0.1
2020-09-03T02:35:44.1886830Z Downloading mac_alias-2.0.7.tar.gz (17 kB)
2020-09-03T02:35:44.5678940Z Collecting gitdb<5,>=4.0.1
2020-09-03T02:35:44.5680690Z Downloading gitdb-4.0.5-py3-none-any.whl (63 kB)
2020-09-03T02:35:44.6664200Z Collecting MarkupSafe>=0.23
2020-09-03T02:35:44.6665300Z Downloading MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl (16 kB)
2020-09-03T02:35:44.6909520Z Collecting binaryornot>=0.4.4
2020-09-03T02:35:44.6910820Z Downloading binaryornot-0.4.4-py2.py3-none-any.whl (9.0 kB)
2020-09-03T02:35:44.7514590Z Collecting click>=7.0
2020-09-03T02:35:44.7560730Z Downloading click-7.1.2-py2.py3-none-any.whl (82 kB)
2020-09-03T02:35:44.7892470Z Collecting poyo>=0.5.0
2020-09-03T02:35:44.7943210Z Downloading poyo-0.5.0-py2.py3-none-any.whl (10 kB)
2020-09-03T02:35:44.8496410Z Collecting six>=1.10
2020-09-03T02:35:44.8550860Z Downloading six-1.15.0-py2.py3-none-any.whl (10 kB)
2020-09-03T02:35:44.8851910Z Collecting python-slugify>=4.0.0
2020-09-03T02:35:44.8901340Z Downloading python-slugify-4.0.1.tar.gz (11 kB)
2020-09-03T02:35:45.2315810Z Collecting jinja2-time>=0.2.0
2020-09-03T02:35:45.2373350Z Downloading jinja2_time-0.2.0-py2.py3-none-any.whl (6.4 kB)
2020-09-03T02:35:45.2688950Z Collecting biplist>=0.6
2020-09-03T02:35:45.2746840Z Downloading biplist-1.0.3.tar.gz (21 kB)
2020-09-03T02:35:45.5845900Z Collecting smmap<4,>=3.0.1
2020-09-03T02:35:45.5894820Z Downloading smmap-3.0.4-py2.py3-none-any.whl (25 kB)
2020-09-03T02:35:45.6155270Z Collecting text-unidecode>=1.3
2020-09-03T02:35:45.6199670Z Downloading text_unidecode-1.3-py2.py3-none-any.whl (78 kB)
2020-09-03T02:35:45.7391840Z Collecting arrow
2020-09-03T02:35:45.7458520Z Downloading arrow-0.16.0-py2.py3-none-any.whl (50 kB)
2020-09-03T02:35:45.8153890Z Collecting python-dateutil>=2.7.0
2020-09-03T02:35:45.8207070Z Downloading python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
2020-09-03T02:35:45.8344490Z Using legacy 'setup.py install' for dmgbuild, since package 'wheel' is not installed.
2020-09-03T02:35:45.8346380Z Using legacy 'setup.py install' for ds-store, since package 'wheel' is not installed.
2020-09-03T02:35:45.8349050Z Using legacy 'setup.py install' for mac-alias, since package 'wheel' is not installed.
2020-09-03T02:35:45.8352230Z Using legacy 'setup.py install' for python-slugify, since package 'wheel' is not installed.
2020-09-03T02:35:45.8354260Z Using legacy 'setup.py install' for biplist, since package 'wheel' is not installed.
2020-09-03T02:35:45.9156650Z Installing collected packages: urllib3, certifi, idna, chardet, requests, biplist, mac-alias, ds-store, dmgbuild, wheel, smmap, gitdb, GitPython, MarkupSafe, Jinja2, toml, binaryornot, click, poyo, six, text-unidecode, python-slugify, python-dateutil, arrow, jinja2-time, cookiecutter, briefcase, tomlkit
2020-09-03T02:35:46.3505810Z Running setup.py install for biplist: started
2020-09-03T02:35:46.7487230Z Running setup.py install for biplist: finished with status 'done'
2020-09-03T02:35:46.7526230Z Running setup.py install for mac-alias: started
2020-09-03T02:35:47.1032730Z Running setup.py install for mac-alias: finished with status 'done'
2020-09-03T02:35:47.1080670Z Running setup.py install for ds-store: started
2020-09-03T02:35:47.4754010Z Running setup.py install for ds-store: finished with status 'done'
2020-09-03T02:35:47.4811900Z Running setup.py install for dmgbuild: started
2020-09-03T02:35:47.8218300Z Running setup.py install for dmgbuild: finished with status 'done'
2020-09-03T02:35:48.2922210Z Running setup.py install for python-slugify: started
2020-09-03T02:35:48.6092530Z Running setup.py install for python-slugify: finished with status 'done'
2020-09-03T02:35:48.9318670Z Successfully installed GitPython-3.1.7 Jinja2-2.11.2 MarkupSafe-1.1.1 arrow-0.16.0 binaryornot-0.4.4 biplist-1.0.3 briefcase-0.3.1 certifi-2020.6.20 chardet-3.0.4 click-7.1.2 cookiecutter-1.7.2 dmgbuild-1.3.3 ds-store-1.1.2 gitdb-4.0.5 idna-2.10 jinja2-time-0.2.0 mac-alias-2.0.7 poyo-0.5.0 python-dateutil-2.8.1 python-slugify-4.0.1 requests-2.24.0 six-1.15.0 smmap-3.0.4 text-unidecode-1.3 toml-0.10.1 tomlkit-0.7.0 urllib3-1.25.10 wheel-0.35.1
2020-09-03T02:35:49.3839770Z Obtaining file:///Users/runner/work/napari/napari
2020-09-03T02:35:49.3884950Z Installing build dependencies: started
2020-09-03T02:35:51.5866800Z Installing build dependencies: finished with status 'done'
2020-09-03T02:35:51.5879770Z Getting requirements to build wheel: started
2020-09-03T02:35:51.8010820Z Getting requirements to build wheel: finished with status 'done'
2020-09-03T02:35:51.8071570Z Preparing wheel metadata: started
2020-09-03T02:35:52.5959450Z Preparing wheel metadata: finished with status 'done'
2020-09-03T02:35:52.6063910Z Collecting napari-svg>=0.1.3
2020-09-03T02:35:52.6282400Z Downloading napari-svg-0.1.3.tar.gz (10 kB)
2020-09-03T02:35:53.0577530Z Collecting tifffile>=2020.2.16
2020-09-03T02:35:53.0661460Z Downloading tifffile-2020.8.25-py3-none-any.whl (147 kB)
2020-09-03T02:35:53.2818280Z Collecting dask[array]>=2.1.0
2020-09-03T02:35:53.2882390Z Downloading dask-2.25.0-py3-none-any.whl (834 kB)
2020-09-03T02:35:53.4628060Z Collecting PyYAML>=5.1
2020-09-03T02:35:53.4704380Z Downloading PyYAML-5.3.1.tar.gz (269 kB)
2020-09-03T02:35:54.1771120Z Collecting ipykernel>=5.1.1
2020-09-03T02:35:54.1844200Z Downloading ipykernel-5.3.4-py3-none-any.whl (120 kB)
2020-09-03T02:35:54.2381270Z Collecting wrapt>=1.11.1
2020-09-03T02:35:54.2427580Z Downloading wrapt-1.12.1.tar.gz (27 kB)
2020-09-03T02:35:54.6142800Z Collecting imageio>=2.5.0
2020-09-03T02:35:54.6261740Z Downloading imageio-2.9.0-py3-none-any.whl (3.3 MB)
2020-09-03T02:35:54.7973860Z Collecting napari-plugin-engine>=0.1.5
2020-09-03T02:35:54.8033720Z Downloading napari_plugin_engine-0.1.7-py3-none-any.whl (32 kB)
2020-09-03T02:35:54.8744360Z Collecting typing-extensions
2020-09-03T02:35:54.8794440Z Downloading typing_extensions-3.7.4.3-py3-none-any.whl (22 kB)
2020-09-03T02:35:54.9207640Z Collecting cachey>=0.2.1
2020-09-03T02:35:54.9260600Z Downloading cachey-0.2.1-py3-none-any.whl (6.4 kB)
2020-09-03T02:35:54.9702460Z Collecting appdirs>=1.4.4
2020-09-03T02:35:54.9773340Z Downloading appdirs-1.4.4-py2.py3-none-any.whl (9.6 kB)
2020-09-03T02:35:55.0710530Z Collecting vispy>=0.6.4
2020-09-03T02:35:55.0850730Z Downloading vispy-0.6.4.tar.gz (13.3 MB)
2020-09-03T02:35:56.1570300Z Installing build dependencies: started
2020-09-03T02:37:01.8876360Z Installing build dependencies: still running...
2020-09-03T02:38:27.2709340Z Installing build dependencies: still running...
2020-09-03T02:39:30.6465630Z Installing build dependencies: still running...
2020-09-03T02:39:30.9861290Z Installing build dependencies: finished with status 'done'
2020-09-03T02:39:31.0295450Z Getting requirements to build wheel: started
2020-09-03T02:39:36.5437790Z Getting requirements to build wheel: finished with status 'done'
2020-09-03T02:39:36.6745300Z Preparing wheel metadata: started
2020-09-03T02:39:38.6056150Z Preparing wheel metadata: finished with status 'done'
2020-09-03T02:39:39.6561220Z Collecting IPython>=7.7.0
2020-09-03T02:39:39.6691790Z Downloading ipython-7.18.1-py3-none-any.whl (786 kB)
2020-09-03T02:39:39.7791850Z Collecting toolz>=0.10.0
2020-09-03T02:39:39.7857070Z Downloading toolz-0.10.0.tar.gz (49 kB)
2020-09-03T02:39:40.7124980Z Collecting psutil>=5.0
2020-09-03T02:39:40.7260650Z Downloading psutil-5.7.2.tar.gz (460 kB)
2020-09-03T02:39:41.3298010Z Collecting PyOpenGL>=3.1.0
2020-09-03T02:39:41.3421680Z Downloading PyOpenGL-3.1.5-py3-none-any.whl (2.4 MB)
2020-09-03T02:39:41.9712930Z Collecting scipy>=1.2.0
2020-09-03T02:39:41.9783940Z Downloading scipy-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl (28.9 MB)
2020-09-03T02:39:43.5568630Z Collecting numpy>=1.10.0
2020-09-03T02:39:43.5623010Z Downloading numpy-1.19.1-cp38-cp38-macosx_10_9_x86_64.whl (15.3 MB)
2020-09-03T02:39:44.6446990Z Collecting Pillow!=7.1.0,!=7.1.1
2020-09-03T02:39:44.6680030Z Downloading Pillow-7.2.0-cp38-cp38-macosx_10_10_x86_64.whl (2.2 MB)
2020-09-03T02:39:44.8198150Z Collecting qtconsole>=4.5.1
2020-09-03T02:39:44.8291520Z Downloading qtconsole-4.7.6-py2.py3-none-any.whl (118 kB)
2020-09-03T02:39:44.8957160Z Collecting numpydoc>=0.9.2
2020-09-03T02:39:44.9031110Z Downloading numpydoc-1.1.0-py3-none-any.whl (47 kB)
2020-09-03T02:39:44.9841230Z Collecting qtpy>=1.7.0
2020-09-03T02:39:44.9902560Z Downloading QtPy-1.9.0-py2.py3-none-any.whl (54 kB)
2020-09-03T02:39:45.1331090Z Collecting PySide2<5.15.0,>=5.12.3; extra == "pyside2"
2020-09-03T02:39:45.1386120Z Downloading PySide2-5.14.2.3-5.14.2-cp35.cp36.cp37.cp38-abi3-macosx_10_13_intel.whl (148.4 MB)
2020-09-03T02:39:48.4805020Z Collecting appnope; platform_system == "Darwin"
2020-09-03T02:39:48.4858480Z Downloading appnope-0.1.0-py2.py3-none-any.whl (4.0 kB)
2020-09-03T02:39:48.5643600Z Collecting traitlets>=4.1.0
2020-09-03T02:39:48.5706200Z Downloading traitlets-5.0.2-py3-none-any.whl (97 kB)
2020-09-03T02:39:48.7216510Z Collecting jupyter-client
2020-09-03T02:39:48.7291560Z Downloading jupyter_client-6.1.7-py3-none-any.whl (108 kB)
2020-09-03T02:39:48.8812600Z Collecting tornado>=4.2
2020-09-03T02:39:48.8883320Z Downloading tornado-6.0.4.tar.gz (496 kB)
2020-09-03T02:39:49.4468880Z Collecting heapdict
2020-09-03T02:39:49.4552440Z Downloading HeapDict-1.0.1-py3-none-any.whl (3.9 kB)
2020-09-03T02:39:49.6193680Z Collecting freetype-py
2020-09-03T02:39:49.6362890Z Downloading freetype_py-2.2.0-py3-none-macosx_10_9_x86_64.whl (852 kB)
2020-09-03T02:39:49.6705490Z Requirement already satisfied: setuptools>=18.5 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from IPython>=7.7.0->napari==0.3.7rc3) (47.1.0)
2020-09-03T02:39:49.7222380Z Collecting backcall
2020-09-03T02:39:49.7281270Z Downloading backcall-0.2.0-py2.py3-none-any.whl (11 kB)
2020-09-03T02:39:49.7870510Z Collecting pexpect>4.3; sys_platform != "win32"
2020-09-03T02:39:49.7935350Z Downloading pexpect-4.8.0-py2.py3-none-any.whl (59 kB)
2020-09-03T02:39:49.9224170Z Collecting decorator
2020-09-03T02:39:49.9276780Z Downloading decorator-4.4.2-py2.py3-none-any.whl (9.2 kB)
2020-09-03T02:39:50.0503800Z Collecting pygments
2020-09-03T02:39:50.0611070Z Downloading Pygments-2.6.1-py3-none-any.whl (914 kB)
2020-09-03T02:39:50.1457410Z Collecting pickleshare
2020-09-03T02:39:50.1507220Z Downloading pickleshare-0.7.5-py2.py3-none-any.whl (6.9 kB)
2020-09-03T02:39:50.2731670Z Collecting jedi>=0.10
2020-09-03T02:39:50.2834900Z Downloading jedi-0.17.2-py2.py3-none-any.whl (1.4 MB)
2020-09-03T02:39:50.5139430Z Collecting prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0
2020-09-03T02:39:50.5212690Z Downloading prompt_toolkit-3.0.7-py3-none-any.whl (355 kB)
2020-09-03T02:39:50.6399960Z Collecting jupyter-core
2020-09-03T02:39:50.6461750Z Downloading jupyter_core-4.6.3-py2.py3-none-any.whl (83 kB)
2020-09-03T02:39:51.1129460Z Collecting pyzmq>=17.1
2020-09-03T02:39:51.1214760Z Downloading pyzmq-19.0.2-cp38-cp38-macosx_10_9_x86_64.whl (806 kB)
2020-09-03T02:39:51.1818800Z Collecting ipython-genutils
2020-09-03T02:39:51.1876600Z Downloading ipython_genutils-0.2.0-py2.py3-none-any.whl (26 kB)
2020-09-03T02:39:51.4711260Z Collecting sphinx>=1.6.5
2020-09-03T02:39:51.4831460Z Downloading Sphinx-3.2.1-py3-none-any.whl (2.9 MB)
2020-09-03T02:39:51.5902800Z Requirement already satisfied: Jinja2>=2.3 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from numpydoc>=0.9.2->napari==0.3.7rc3) (2.11.2)
2020-09-03T02:39:51.6749000Z Collecting shiboken2==5.14.2.3
2020-09-03T02:39:51.6859180Z Downloading shiboken2-5.14.2.3-5.14.2-cp35.cp36.cp37.cp38-abi3-macosx_10_13_intel.whl (837 kB)
2020-09-03T02:39:51.7200960Z Requirement already satisfied: python-dateutil>=2.1 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from jupyter-client->ipykernel>=5.1.1->napari==0.3.7rc3) (2.8.1)
2020-09-03T02:39:51.7554890Z Collecting ptyprocess>=0.5
2020-09-03T02:39:51.7620090Z Downloading ptyprocess-0.6.0-py2.py3-none-any.whl (39 kB)
2020-09-03T02:39:51.8849670Z Collecting parso<0.8.0,>=0.7.0
2020-09-03T02:39:51.8909480Z Downloading parso-0.7.1-py2.py3-none-any.whl (109 kB)
2020-09-03T02:39:51.9804230Z Collecting wcwidth
2020-09-03T02:39:51.9861070Z Downloading wcwidth-0.2.5-py2.py3-none-any.whl (30 kB)
2020-09-03T02:39:52.0375900Z Collecting sphinxcontrib-qthelp
2020-09-03T02:39:52.0428640Z Downloading sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl (90 kB)
2020-09-03T02:39:52.1618700Z Collecting babel>=1.3
2020-09-03T02:39:52.1683090Z Downloading Babel-2.8.0-py2.py3-none-any.whl (8.6 MB)
2020-09-03T02:39:52.5541610Z Collecting packaging
2020-09-03T02:39:52.5609070Z Downloading packaging-20.4-py2.py3-none-any.whl (37 kB)
2020-09-03T02:39:52.6074590Z Collecting snowballstemmer>=1.1
2020-09-03T02:39:52.6127860Z Downloading snowballstemmer-2.0.0-py2.py3-none-any.whl (97 kB)
2020-09-03T02:39:52.6574180Z Collecting sphinxcontrib-jsmath
2020-09-03T02:39:52.6629360Z Downloading sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl (5.1 kB)
2020-09-03T02:39:52.7290580Z Collecting docutils>=0.12
2020-09-03T02:39:52.7368660Z Downloading docutils-0.16-py2.py3-none-any.whl (548 kB)
2020-09-03T02:39:52.7973870Z Collecting sphinxcontrib-devhelp
2020-09-03T02:39:52.8058340Z Downloading sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl (84 kB)
2020-09-03T02:39:52.9085150Z Collecting imagesize
2020-09-03T02:39:52.9162720Z Downloading imagesize-1.2.0-py2.py3-none-any.whl (4.8 kB)
2020-09-03T02:39:52.9823670Z Collecting alabaster<0.8,>=0.7
2020-09-03T02:39:52.9876830Z Downloading alabaster-0.7.12-py2.py3-none-any.whl (14 kB)
2020-09-03T02:39:53.0473690Z Collecting sphinxcontrib-serializinghtml
2020-09-03T02:39:53.0544940Z Downloading sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl (89 kB)
2020-09-03T02:39:53.1053410Z Collecting sphinxcontrib-applehelp
2020-09-03T02:39:53.1118850Z Downloading sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl (121 kB)
2020-09-03T02:39:53.1658680Z Collecting sphinxcontrib-htmlhelp
2020-09-03T02:39:53.1714570Z Downloading sphinxcontrib_htmlhelp-1.0.3-py2.py3-none-any.whl (96 kB)
2020-09-03T02:39:53.1981710Z Requirement already satisfied: requests>=2.5.0 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from sphinx>=1.6.5->numpydoc>=0.9.2->napari==0.3.7rc3) (2.24.0)
2020-09-03T02:39:53.2155500Z Requirement already satisfied: MarkupSafe>=0.23 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from Jinja2>=2.3->numpydoc>=0.9.2->napari==0.3.7rc3) (1.1.1)
2020-09-03T02:39:53.2170660Z Requirement already satisfied: six>=1.5 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from python-dateutil>=2.1->jupyter-client->ipykernel>=5.1.1->napari==0.3.7rc3) (1.15.0)
2020-09-03T02:39:53.5410330Z Collecting pytz>=2015.7
2020-09-03T02:39:53.5558830Z Downloading pytz-2020.1-py2.py3-none-any.whl (510 kB)
2020-09-03T02:39:53.8194700Z Collecting pyparsing>=2.0.2
2020-09-03T02:39:53.8336310Z Downloading pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
2020-09-03T02:39:53.8462500Z Requirement already satisfied: certifi>=2017.4.17 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from requests>=2.5.0->sphinx>=1.6.5->numpydoc>=0.9.2->napari==0.3.7rc3) (2020.6.20)
2020-09-03T02:39:53.8474410Z Requirement already satisfied: chardet<4,>=3.0.2 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from requests>=2.5.0->sphinx>=1.6.5->numpydoc>=0.9.2->napari==0.3.7rc3) (3.0.4)
2020-09-03T02:39:53.8490220Z Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from requests>=2.5.0->sphinx>=1.6.5->numpydoc>=0.9.2->napari==0.3.7rc3) (1.25.10)
2020-09-03T02:39:53.8657670Z Requirement already satisfied: idna<3,>=2.5 in /Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages (from requests>=2.5.0->sphinx>=1.6.5->numpydoc>=0.9.2->napari==0.3.7rc3) (2.10)
2020-09-03T02:39:53.8683200Z Building wheels for collected packages: napari-svg, PyYAML, wrapt, vispy, toolz, psutil, tornado
2020-09-03T02:39:53.8692310Z Building wheel for napari-svg (setup.py): started
2020-09-03T02:39:54.2972390Z Building wheel for napari-svg (setup.py): finished with status 'done'
2020-09-03T02:39:54.2981920Z Created wheel for napari-svg: filename=napari_svg-0.1.3-py3-none-any.whl size=11972 sha256=b56b3fc5f59ea032cdafa816c21a75e8413e34e29d8bdaf55f7207fef867fd73
2020-09-03T02:39:54.2982900Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/96/12/d9/a0a4a8914067f0ab668322ba07270d05475000398c3530ff9a
2020-09-03T02:39:54.3005750Z Building wheel for PyYAML (setup.py): started
2020-09-03T02:40:01.8259850Z Building wheel for PyYAML (setup.py): finished with status 'done'
2020-09-03T02:40:01.8274300Z Created wheel for PyYAML: filename=PyYAML-5.3.1-cp38-cp38-macosx_10_14_x86_64.whl size=156398 sha256=89b1292cba9ce9e0d2cca6536afce3524cb40d858434046a4c63ee9a586b26b6
2020-09-03T02:40:01.8274640Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/13/90/db/290ab3a34f2ef0b5a0f89235dc2d40fea83e77de84ed2dc05c
2020-09-03T02:40:01.8294650Z Building wheel for wrapt (setup.py): started
2020-09-03T02:40:03.1518600Z Building wheel for wrapt (setup.py): finished with status 'done'
2020-09-03T02:40:03.1528330Z Created wheel for wrapt: filename=wrapt-1.12.1-cp38-cp38-macosx_10_14_x86_64.whl size=32551 sha256=a88ea091adec50c531d211172c22cd63f56e1773a3dbae6e7f466b2e2f65f47b
2020-09-03T02:40:03.1528680Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/5f/fd/9e/b6cf5890494cb8ef0b5eaff72e5d55a70fb56316007d6dfe73
2020-09-03T02:40:03.1570890Z Building wheel for vispy (PEP 517): started
2020-09-03T02:40:07.1746870Z Building wheel for vispy (PEP 517): finished with status 'done'
2020-09-03T02:40:07.1839520Z Created wheel for vispy: filename=vispy-0.6.4-cp38-cp38-macosx_10_14_x86_64.whl size=2241226 sha256=bca6969890582429a938dabdd7308e977084aac5c99b3a707251e9ab8e468125
2020-09-03T02:40:07.1840430Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/60/f9/ba/c802b3bb175c9f158667b47f71be5aab048f25cb5c46c69646
2020-09-03T02:40:07.1856340Z Building wheel for toolz (setup.py): started
2020-09-03T02:40:07.7105510Z Building wheel for toolz (setup.py): finished with status 'done'
2020-09-03T02:40:07.7118680Z Created wheel for toolz: filename=toolz-0.10.0-py3-none-any.whl size=55576 sha256=3397b54c3ad0cff79496927025731eaa738666d16c1dc747f8164357b05adfa5
2020-09-03T02:40:07.7118900Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/a5/2b/b5/05758d5828d65f2adef8fbb5d5484e4adb946ae1827a973a01
2020-09-03T02:40:07.7135860Z Building wheel for psutil (setup.py): started
2020-09-03T02:40:10.0479350Z Building wheel for psutil (setup.py): finished with status 'done'
2020-09-03T02:40:10.0498990Z Created wheel for psutil: filename=psutil-5.7.2-cp38-cp38-macosx_10_14_x86_64.whl size=234086 sha256=1900da33cbe77f7b739fec92353c49665a7eab652259fcf5f5e9bc932b5e0263
2020-09-03T02:40:10.0499240Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/91/cf/b0/0c9998060b55ca80ea7a50a8639c3bdc6ba886eeff014bc9ac
2020-09-03T02:40:10.0515840Z Building wheel for tornado (setup.py): started
2020-09-03T02:40:10.9626160Z Building wheel for tornado (setup.py): finished with status 'done'
2020-09-03T02:40:11.7233010Z Created wheel for tornado: filename=tornado-6.0.4-cp38-cp38-macosx_10_14_x86_64.whl size=417088 sha256=9304a9ebfd922a8bbb0ce05dc8e339ba5ec60c70aa60ef3c28f15208a01d71e3
2020-09-03T02:40:11.7233300Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/88/79/e5/598ba17e85eccf2626eab62e4ee8452895636cd542650d450d
2020-09-03T02:40:11.7233840Z Successfully built napari-svg PyYAML wrapt vispy toolz psutil tornado
2020-09-03T02:40:11.7234990Z Installing collected packages: Pillow, numpy, imageio, napari-plugin-engine, freetype-py, vispy, napari-svg, tifffile, PyYAML, toolz, dask, appnope, ipython-genutils, traitlets, pyzmq, tornado, jupyter-core, jupyter-client, backcall, ptyprocess, pexpect, decorator, pygments, pickleshare, parso, jedi, wcwidth, prompt-toolkit, IPython, ipykernel, wrapt, typing-extensions, heapdict, cachey, appdirs, psutil, PyOpenGL, scipy, qtpy, qtconsole, sphinxcontrib-qthelp, pytz, babel, pyparsing, packaging, snowballstemmer, sphinxcontrib-jsmath, docutils, sphinxcontrib-devhelp, imagesize, alabaster, sphinxcontrib-serializinghtml, sphinxcontrib-applehelp, sphinxcontrib-htmlhelp, sphinx, numpydoc, shiboken2, PySide2, napari
2020-09-03T02:40:34.4527210Z Running setup.py develop for napari
2020-09-03T02:40:35.3975740Z Successfully installed IPython-7.18.1 Pillow-7.2.0 PyOpenGL-3.1.5 PySide2-5.14.2.3 PyYAML-5.3.1 alabaster-0.7.12 appdirs-1.4.4 appnope-0.1.0 babel-2.8.0 backcall-0.2.0 cachey-0.2.1 dask-2.25.0 decorator-4.4.2 docutils-0.16 freetype-py-2.2.0 heapdict-1.0.1 imageio-2.9.0 imagesize-1.2.0 ipykernel-5.3.4 ipython-genutils-0.2.0 jedi-0.17.2 jupyter-client-6.1.7 jupyter-core-4.6.3 napari napari-plugin-engine-0.1.7 napari-svg-0.1.3 numpy-1.19.1 numpydoc-1.1.0 packaging-20.4 parso-0.7.1 pexpect-4.8.0 pickleshare-0.7.5 prompt-toolkit-3.0.7 psutil-5.7.2 ptyprocess-0.6.0 pygments-2.6.1 pyparsing-2.4.7 pytz-2020.1 pyzmq-19.0.2 qtconsole-4.7.6 qtpy-1.9.0 scipy-1.5.2 shiboken2-5.14.2.3 snowballstemmer-2.0.0 sphinx-3.2.1 sphinxcontrib-applehelp-1.0.2 sphinxcontrib-devhelp-1.0.2 sphinxcontrib-htmlhelp-1.0.3 sphinxcontrib-jsmath-1.0.1 sphinxcontrib-qthelp-1.0.3 sphinxcontrib-serializinghtml-1.1.4 tifffile-2020.8.25 toolz-0.10.0 tornado-6.0.4 traitlets-5.0.2 typing-extensions-3.7.4.3 vispy-0.6.4 wcwidth-0.2.5 wrapt-1.12.1
2020-09-03T02:40:36.2706120Z ##[group]Run VER=`python bundle.py --version`
2020-09-03T02:40:36.2706390Z �[36;1mVER=`python bundle.py --version`�[0m
2020-09-03T02:40:36.2706500Z �[36;1mecho "::set-env name=version::$VER"�[0m
2020-09-03T02:40:36.2706600Z �[36;1mecho $VER�[0m
2020-09-03T02:40:36.3044610Z shell: /bin/bash --noprofile --norc -e -o pipefail {0}
2020-09-03T02:40:36.3045170Z env:
2020-09-03T02:40:36.3046590Z GITHUB_TOKEN: ***
2020-09-03T02:40:36.3046710Z DISPLAY: :99.0
2020-09-03T02:40:36.3046830Z pythonLocation: /Users/runner/hostedtoolcache/Python/3.8.5/x64
2020-09-03T02:40:36.3046930Z ##[endgroup]
2020-09-03T02:40:36.3943780Z 0.3.7rc3
2020-09-03T02:40:36.3966720Z ##[group]Run python -m bundle
2020-09-03T02:40:36.3966910Z �[36;1mpython -m bundle�[0m
2020-09-03T02:40:36.4130090Z shell: /bin/bash -e {0}
2020-09-03T02:40:36.4130240Z env:
2020-09-03T02:40:36.4130750Z GITHUB_TOKEN: ***
2020-09-03T02:40:36.4130920Z DISPLAY: :99.0
2020-09-03T02:40:36.4131020Z pythonLocation: /Users/runner/hostedtoolcache/Python/3.8.5/x64
2020-09-03T02:40:36.4131150Z version: 0.3.7rc3
2020-09-03T02:40:36.4131280Z ##[endgroup]
2020-09-03T02:40:45.0358100Z Unable to create basic Accelerated OpenGL renderer.
2020-09-03T02:40:45.0360730Z Unable to create basic Accelerated OpenGL renderer.
2020-09-03T02:40:45.0364170Z Core Image is now using the software OpenGL renderer. This will be slow.
2020-09-03T02:40:45.3454630Z napari: 0.3.7rc3
2020-09-03T02:40:45.3455880Z Platform: macOS-10.15.6-x86_64-i386-64bit
2020-09-03T02:40:45.3456780Z Python: 3.8.5 (default, Jul 21 2020, 12:20:54) [Clang 11.0.0 (clang-1100.0.33.17)]
2020-09-03T02:40:45.3457010Z Qt: 5.14.2
2020-09-03T02:40:45.3457260Z PySide2: 5.14.2.3
2020-09-03T02:40:45.3457410Z NumPy: 1.19.1
2020-09-03T02:40:45.3457540Z SciPy: 1.5.2
2020-09-03T02:40:45.3457670Z Dask: 2.25.0
2020-09-03T02:40:45.3457800Z VisPy: 0.6.4
2020-09-03T02:40:45.3457860Z
2020-09-03T02:40:45.3458510Z GL version: 2.1 APPLE-17.10.22
2020-09-03T02:40:45.3458680Z MAX_TEXTURE_SIZE: 16384
2020-09-03T02:40:45.3458740Z
2020-09-03T02:40:45.3458870Z Plugins:
2020-09-03T02:40:45.3459780Z - napari_plugin_engine: 0.1.7
2020-09-03T02:40:45.3460400Z - svg: 0.1.3
2020-09-03T02:40:47.9385040Z
2020-09-03T02:40:47.9451560Z [napari] Generating application template...
2020-09-03T02:40:47.9453400Z Using app template: https://github.com/beeware/briefcase-macOS-app-template.git
2020-09-03T02:40:47.9567460Z
2020-09-03T02:40:47.9587980Z [napari] Installing support package...
2020-09-03T02:40:47.9620720Z Using support package https://briefcase-support.org/python?platform=macOS&version=3.8
2020-09-03T02:40:47.9625140Z ... using most recent revision
2020-09-03T02:40:47.9631410Z Downloading Python-3.8-macOS-support.b3.tar.gz...
2020-09-03T02:40:47.9631640Z
2020-09-03T02:40:48.0113290Z ######............................................ 12%
2020-09-03T02:40:48.0829080Z #############..................................... 26%
2020-09-03T02:40:48.1490490Z ####################.............................. 40%
2020-09-03T02:40:48.1563150Z ###########################....................... 54%
2020-09-03T02:40:48.2216300Z #################################................. 66%
2020-09-03T02:40:48.2273640Z ########################################.......... 80%
2020-09-03T02:40:48.2316450Z ###############################################... 94%
2020-09-03T02:40:49.1551270Z ################################################## 100%Ignoring importlib-metadata: markers 'python_version < "3.8"' don't match your environment
2020-09-03T02:40:49.4399680Z Collecting appdirs>=1.4.4
2020-09-03T02:40:49.4474270Z Using cached appdirs-1.4.4-py2.py3-none-any.whl (9.6 kB)
2020-09-03T02:40:49.4744440Z Collecting cachey>=0.2.1
2020-09-03T02:40:49.4777690Z Using cached cachey-0.2.1-py3-none-any.whl (6.4 kB)
2020-09-03T02:40:49.6660720Z Collecting dask[array]>=2.1.0
2020-09-03T02:40:49.6721850Z Using cached dask-2.25.0-py3-none-any.whl (834 kB)
2020-09-03T02:40:49.7635230Z Collecting imageio>=2.5.0
2020-09-03T02:40:49.7767750Z Using cached imageio-2.9.0-py3-none-any.whl (3.3 MB)
2020-09-03T02:40:49.9107170Z Collecting ipykernel>=5.1.1
2020-09-03T02:40:49.9146880Z Using cached ipykernel-5.3.4-py3-none-any.whl (120 kB)
2020-09-03T02:40:50.0892930Z Collecting IPython>=7.7.0
2020-09-03T02:40:50.0937160Z Using cached ipython-7.18.1-py3-none-any.whl (786 kB)
2020-09-03T02:40:50.1820240Z Collecting napari-plugin-engine>=0.1.5
2020-09-03T02:40:50.1860010Z Using cached napari_plugin_engine-0.1.7-py3-none-any.whl (32 kB)
2020-09-03T02:40:50.2133050Z Processing /Users/runner/Library/Caches/pip/wheels/96/12/d9/a0a4a8914067f0ab668322ba07270d05475000398c3530ff9a/napari_svg-0.1.3-py3-none-any.whl
2020-09-03T02:40:51.2184750Z Collecting numpy>=1.10.0
2020-09-03T02:40:51.2742670Z Using cached numpy-1.19.1-cp38-cp38-macosx_10_9_x86_64.whl (15.3 MB)
2020-09-03T02:40:51.4277760Z Collecting numpydoc>=0.9.2
2020-09-03T02:40:51.4312250Z Using cached numpydoc-1.1.0-py3-none-any.whl (47 kB)
2020-09-03T02:40:52.3438360Z Collecting Pillow!=7.1.0,!=7.1.1
2020-09-03T02:40:52.3550660Z Using cached Pillow-7.2.0-cp38-cp38-macosx_10_10_x86_64.whl (2.2 MB)
2020-09-03T02:40:52.7859740Z Processing /Users/runner/Library/Caches/pip/wheels/91/cf/b0/0c9998060b55ca80ea7a50a8639c3bdc6ba886eeff014bc9ac/psutil-5.7.2-cp38-cp38-macosx_10_14_x86_64.whl
2020-09-03T02:40:52.8305390Z Collecting PyOpenGL>=3.1.0
2020-09-03T02:40:52.8412990Z Using cached PyOpenGL-3.1.5-py3-none-any.whl (2.4 MB)
2020-09-03T02:40:52.9781240Z Processing /Users/runner/Library/Caches/pip/wheels/13/90/db/290ab3a34f2ef0b5a0f89235dc2d40fea83e77de84ed2dc05c/PyYAML-5.3.1-cp38-cp38-macosx_10_14_x86_64.whl
2020-09-03T02:40:53.0518640Z Collecting qtconsole>=4.5.1
2020-09-03T02:40:53.0560750Z Using cached qtconsole-4.7.6-py2.py3-none-any.whl (118 kB)
2020-09-03T02:40:53.1125350Z Collecting qtpy>=1.7.0
2020-09-03T02:40:53.1163800Z Using cached QtPy-1.9.0-py2.py3-none-any.whl (54 kB)
2020-09-03T02:40:53.6262670Z Collecting scipy>=1.2.0
2020-09-03T02:40:53.7264920Z Using cached scipy-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl (28.9 MB)
2020-09-03T02:40:54.0692690Z Collecting tifffile>=2020.2.16
2020-09-03T02:40:54.0747200Z Using cached tifffile-2020.8.25-py3-none-any.whl (147 kB)
2020-09-03T02:40:54.1082030Z Processing /Users/runner/Library/Caches/pip/wheels/a5/2b/b5/05758d5828d65f2adef8fbb5d5484e4adb946ae1827a973a01/toolz-0.10.0-py3-none-any.whl
2020-09-03T02:40:54.1550280Z Collecting typing_extensions
2020-09-03T02:40:54.1589100Z Using cached typing_extensions-3.7.4.3-py3-none-any.whl (22 kB)
2020-09-03T02:40:54.2299530Z Processing /Users/runner/Library/Caches/pip/wheels/60/f9/ba/c802b3bb175c9f158667b47f71be5aab048f25cb5c46c69646/vispy-0.6.4-cp38-cp38-macosx_10_14_x86_64.whl
2020-09-03T02:40:54.2734070Z Processing /Users/runner/Library/Caches/pip/wheels/5f/fd/9e/b6cf5890494cb8ef0b5eaff72e5d55a70fb56316007d6dfe73/wrapt-1.12.1-cp38-cp38-macosx_10_14_x86_64.whl
2020-09-03T02:40:54.5394230Z Collecting pip
2020-09-03T02:40:54.5981030Z Using cached pip-20.2.2-py2.py3-none-any.whl (1.5 MB)
2020-09-03T02:40:54.7042890Z Collecting PySide2==5.14.2.2
2020-09-03T02:40:58.7199270Z Downloading PySide2-5.14.2.2-5.14.2-cp35.cp36.cp37.cp38-abi3-macosx_10_13_intel.whl (148.4 MB)
2020-09-03T02:41:01.9538070Z Collecting scikit-image
2020-09-03T02:41:01.9606190Z Downloading scikit_image-0.17.2-cp38-cp38-macosx_10_13_x86_64.whl (12.2 MB)
2020-09-03T02:41:02.3319000Z Collecting zarr
2020-09-03T02:41:02.3434310Z Downloading zarr-2.4.0.tar.gz (3.3 MB)
2020-09-03T02:41:07.0822370Z Collecting heapdict
2020-09-03T02:41:07.0860100Z Using cached HeapDict-1.0.1-py3-none-any.whl (3.9 kB)
2020-09-03T02:41:07.1871560Z Collecting traitlets>=4.1.0
2020-09-03T02:41:07.1911740Z Using cached traitlets-5.0.2-py3-none-any.whl (97 kB)
2020-09-03T02:41:07.3214260Z Collecting jupyter-client
2020-09-03T02:41:07.3257950Z Using cached jupyter_client-6.1.7-py3-none-any.whl (108 kB)
2020-09-03T02:41:07.3601900Z Collecting appnope; platform_system == "Darwin"
2020-09-03T02:41:07.3634710Z Using cached appnope-0.1.0-py2.py3-none-any.whl (4.0 kB)
2020-09-03T02:41:07.4391480Z Processing /Users/runner/Library/Caches/pip/wheels/88/79/e5/598ba17e85eccf2626eab62e4ee8452895636cd542650d450d/tornado-6.0.4-cp38-cp38-macosx_10_14_x86_64.whl
2020-09-03T02:41:07.5423950Z Collecting jedi>=0.10
2020-09-03T02:41:07.5498900Z Using cached jedi-0.17.2-py2.py3-none-any.whl (1.4 MB)
2020-09-03T02:41:07.6322640Z Collecting pickleshare
2020-09-03T02:41:07.6355580Z Using cached pickleshare-0.7.5-py2.py3-none-any.whl (6.9 kB)
2020-09-03T02:41:07.7332450Z Collecting pygments
2020-09-03T02:41:07.7373580Z Using cached Pygments-2.6.1-py3-none-any.whl (914 kB)
2020-09-03T02:41:07.7664890Z Collecting backcall
2020-09-03T02:41:07.7712570Z Using cached backcall-0.2.0-py2.py3-none-any.whl (11 kB)
2020-09-03T02:41:09.0442260Z Collecting setuptools>=18.5
2020-09-03T02:41:09.0485910Z Using cached setuptools-50.1.0-py3-none-any.whl (784 kB)
2020-09-03T02:41:09.1642870Z Collecting pexpect>4.3; sys_platform != "win32"
2020-09-03T02:41:09.1677040Z Using cached pexpect-4.8.0-py2.py3-none-any.whl (59 kB)
2020-09-03T02:41:09.3221890Z Collecting prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0
2020-09-03T02:41:09.3276940Z Using cached prompt_toolkit-3.0.7-py3-none-any.whl (355 kB)
2020-09-03T02:41:09.4480460Z Collecting decorator
2020-09-03T02:41:09.4514030Z Using cached decorator-4.4.2-py2.py3-none-any.whl (9.2 kB)
2020-09-03T02:41:09.7287840Z Collecting sphinx>=1.6.5
2020-09-03T02:41:09.7416880Z Using cached Sphinx-3.2.1-py3-none-any.whl (2.9 MB)
2020-09-03T02:41:09.9063450Z Collecting Jinja2>=2.3
2020-09-03T02:41:09.9102160Z Using cached Jinja2-2.11.2-py2.py3-none-any.whl (125 kB)
2020-09-03T02:41:09.9352940Z Collecting ipython-genutils
2020-09-03T02:41:09.9386200Z Using cached ipython_genutils-0.2.0-py2.py3-none-any.whl (26 kB)
2020-09-03T02:41:10.3573510Z Collecting pyzmq>=17.1
2020-09-03T02:41:10.3640020Z Using cached pyzmq-19.0.2-cp38-cp38-macosx_10_9_x86_64.whl (806 kB)
2020-09-03T02:41:10.4607710Z Collecting jupyter-core
2020-09-03T02:41:10.4641930Z Using cached jupyter_core-4.6.3-py2.py3-none-any.whl (83 kB)
2020-09-03T02:41:10.6073990Z Collecting freetype-py
2020-09-03T02:41:10.6130830Z Using cached freetype_py-2.2.0-py3-none-macosx_10_9_x86_64.whl (852 kB)
2020-09-03T02:41:10.6927560Z Collecting shiboken2==5.14.2.2
2020-09-03T02:41:10.7030870Z Downloading shiboken2-5.14.2.2-5.14.2-cp35.cp36.cp37.cp38-abi3-macosx_10_13_intel.whl (834 kB)
2020-09-03T02:41:10.7865420Z Collecting networkx>=2.0
2020-09-03T02:41:10.8021370Z Downloading networkx-2.5-py3-none-any.whl (1.6 MB)
2020-09-03T02:41:11.3343770Z Collecting matplotlib!=3.0.0,>=2.0.0
2020-09-03T02:41:11.3448330Z Downloading matplotlib-3.3.1-cp38-cp38-macosx_10_9_x86_64.whl (8.5 MB)
2020-09-03T02:41:11.7170830Z Collecting PyWavelets>=1.1.1
2020-09-03T02:41:11.7390120Z Downloading PyWavelets-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl (4.3 MB)
2020-09-03T02:41:11.8591060Z Collecting asciitree
2020-09-03T02:41:11.8645460Z Downloading asciitree-0.3.3.tar.gz (4.0 kB)
2020-09-03T02:41:12.2800200Z Collecting fasteners
2020-09-03T02:41:12.2880100Z Downloading fasteners-0.15-py2.py3-none-any.whl (23 kB)
2020-09-03T02:41:12.3425610Z Collecting numcodecs>=0.6.4
2020-09-03T02:41:12.3592890Z Downloading numcodecs-0.6.4.tar.gz (3.8 MB)
2020-09-03T02:41:16.2554350Z Collecting python-dateutil>=2.1
2020-09-03T02:41:16.2603290Z Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
2020-09-03T02:41:16.3085800Z Collecting parso<0.8.0,>=0.7.0
2020-09-03T02:41:16.3129770Z Using cached parso-0.7.1-py2.py3-none-any.whl (109 kB)
2020-09-03T02:41:16.3435910Z Collecting ptyprocess>=0.5
2020-09-03T02:41:16.3469830Z Using cached ptyprocess-0.6.0-py2.py3-none-any.whl (39 kB)
2020-09-03T02:41:16.4113450Z Collecting wcwidth
2020-09-03T02:41:16.4148380Z Using cached wcwidth-0.2.5-py2.py3-none-any.whl (30 kB)
2020-09-03T02:41:16.7047770Z Collecting requests>=2.5.0
2020-09-03T02:41:16.7082870Z Using cached requests-2.24.0-py2.py3-none-any.whl (61 kB)
2020-09-03T02:41:16.7957220Z Collecting babel>=1.3
2020-09-03T02:41:16.8235950Z Using cached Babel-2.8.0-py2.py3-none-any.whl (8.6 MB)
2020-09-03T02:41:16.9270840Z Collecting snowballstemmer>=1.1
2020-09-03T02:41:16.9305520Z Using cached snowballstemmer-2.0.0-py2.py3-none-any.whl (97 kB)
2020-09-03T02:41:16.9555860Z Collecting sphinxcontrib-devhelp
2020-09-03T02:41:16.9587650Z Using cached sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl (84 kB)
2020-09-03T02:41:17.0132190Z Collecting docutils>=0.12
2020-09-03T02:41:17.0179410Z Using cached docutils-0.16-py2.py3-none-any.whl (548 kB)
2020-09-03T02:41:17.0950950Z Collecting alabaster<0.8,>=0.7
2020-09-03T02:41:17.0986410Z Using cached alabaster-0.7.12-py2.py3-none-any.whl (14 kB)
2020-09-03T02:41:17.1204700Z Collecting sphinxcontrib-jsmath
2020-09-03T02:41:17.1236010Z Using cached sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl (5.1 kB)
2020-09-03T02:41:17.1656770Z Collecting imagesize
2020-09-03T02:41:17.1687770Z Using cached imagesize-1.2.0-py2.py3-none-any.whl (4.8 kB)
2020-09-03T02:41:17.2115070Z Collecting sphinxcontrib-serializinghtml
2020-09-03T02:41:17.2150660Z Using cached sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl (89 kB)
2020-09-03T02:41:17.2460570Z Collecting sphinxcontrib-applehelp
2020-09-03T02:41:17.2497690Z Using cached sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl (121 kB)
2020-09-03T02:41:17.2898000Z Collecting sphinxcontrib-qthelp
2020-09-03T02:41:17.2930700Z Using cached sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl (90 kB)
2020-09-03T02:41:17.4307010Z Collecting packaging
2020-09-03T02:41:17.4343600Z Using cached packaging-20.4-py2.py3-none-any.whl (37 kB)
2020-09-03T02:41:17.4660630Z Collecting sphinxcontrib-htmlhelp
2020-09-03T02:41:17.4694320Z Using cached sphinxcontrib_htmlhelp-1.0.3-py2.py3-none-any.whl (96 kB)
2020-09-03T02:41:17.5392560Z Collecting MarkupSafe>=0.23
2020-09-03T02:41:17.5426560Z Using cached MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl (16 kB)
2020-09-03T02:41:17.5867180Z Collecting certifi>=2020.06.20
2020-09-03T02:41:17.5906980Z Using cached certifi-2020.6.20-py2.py3-none-any.whl (156 kB)
2020-09-03T02:41:17.6490500Z Collecting kiwisolver>=1.0.1
2020-09-03T02:41:17.6552170Z Downloading kiwisolver-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl (60 kB)
2020-09-03T02:41:17.8285360Z Collecting pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.3
2020-09-03T02:41:17.8326130Z Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
2020-09-03T02:41:17.8551920Z Collecting cycler>=0.10
2020-09-03T02:41:17.8601540Z Downloading cycler-0.10.0-py2.py3-none-any.whl (6.5 kB)
2020-09-03T02:41:17.9439770Z Collecting six
2020-09-03T02:41:17.9474430Z Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
2020-09-03T02:41:17.9847280Z Collecting monotonic>=0.1
2020-09-03T02:41:17.9901560Z Downloading monotonic-1.5-py2.py3-none-any.whl (5.3 kB)
2020-09-03T02:41:18.1080580Z Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
2020-09-03T02:41:18.1114400Z Using cached urllib3-1.25.10-py2.py3-none-any.whl (127 kB)
2020-09-03T02:41:18.1688530Z Collecting idna<3,>=2.5
2020-09-03T02:41:18.1721260Z Using cached idna-2.10-py2.py3-none-any.whl (58 kB)
2020-09-03T02:41:18.2051430Z Collecting chardet<4,>=3.0.2
2020-09-03T02:41:18.2085000Z Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
2020-09-03T02:41:18.4482410Z Collecting pytz>=2015.7
2020-09-03T02:41:18.4533040Z Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
2020-09-03T02:41:18.4687840Z Building wheels for collected packages: zarr, asciitree, numcodecs
2020-09-03T02:41:18.4697280Z Building wheel for zarr (setup.py): started
2020-09-03T02:41:19.2775590Z Building wheel for zarr (setup.py): finished with status 'done'
2020-09-03T02:41:19.2792370Z Created wheel for zarr: filename=zarr-2.4.0-py3-none-any.whl size=127065 sha256=28f0b6b7ca3ac4b7576e535370aa122283c0065d8c890fc32a363680791169b9
2020-09-03T02:41:19.2793050Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/73/45/aa/2472a351a13ce1a2a4fe06149435fd6ffe37c31562037035f8
2020-09-03T02:41:19.2820360Z Building wheel for asciitree (setup.py): started
2020-09-03T02:41:19.7262010Z Building wheel for asciitree (setup.py): finished with status 'done'
2020-09-03T02:41:19.7269550Z Created wheel for asciitree: filename=asciitree-0.3.3-py3-none-any.whl size=5035 sha256=19dbca1fbbc1378ac4bae0b4fa8b23bdabb5cd4412caa4bcfd01fea29b260ce1
2020-09-03T02:41:19.7269880Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/a3/d7/75/19cd0d2a893cad4bb0b2b16dd572ad2916d19c0d5ee9612511
2020-09-03T02:41:19.7287890Z Building wheel for numcodecs (setup.py): started
2020-09-03T02:42:20.9372110Z Building wheel for numcodecs (setup.py): still running...
2020-09-03T02:43:11.6848020Z Building wheel for numcodecs (setup.py): finished with status 'done'
2020-09-03T02:43:11.6890190Z Created wheel for numcodecs: filename=numcodecs-0.6.4-cp38-cp38-macosx_10_14_x86_64.whl size=1207253 sha256=367f4cab4fd6ecd0c0687510215f9ebc8fa1b3a4f75ba547ca0f23528976b60c
2020-09-03T02:43:11.6890430Z Stored in directory: /Users/runner/Library/Caches/pip/wheels/06/84/42/9947cf5ce52463b17e99e7c89be03025256fe4e1c534d184cb
2020-09-03T02:43:11.6902650Z Successfully built zarr asciitree numcodecs
2020-09-03T02:43:12.7127470Z Installing collected packages: appdirs, heapdict, cachey, PyYAML, numpy, toolz, dask, Pillow, imageio, parso, jedi, appnope, ipython-genutils, traitlets, pickleshare, pygments, backcall, setuptools, ptyprocess, pexpect, wcwidth, prompt-toolkit, decorator, IPython, six, python-dateutil, jupyter-core, pyzmq, tornado, jupyter-client, ipykernel, napari-plugin-engine, freetype-py, vispy, napari-svg, urllib3, idna, certifi, chardet, requests, pytz, babel, snowballstemmer, sphinxcontrib-devhelp, docutils, alabaster, sphinxcontrib-jsmath, imagesize, sphinxcontrib-serializinghtml, sphinxcontrib-applehelp, sphinxcontrib-qthelp, MarkupSafe, Jinja2, pyparsing, packaging, sphinxcontrib-htmlhelp, sphinx, numpydoc, psutil, PyOpenGL, qtpy, qtconsole, scipy, tifffile, typing-extensions, wrapt, pip, shiboken2, PySide2, networkx, kiwisolver, cycler, matplotlib, PyWavelets, scikit-image, asciitree, monotonic, fasteners, numcodecs, zarr
2020-09-03T02:43:46.0716570Z Successfully installed IPython-7.18.1 Jinja2-2.11.2 MarkupSafe-1.1.1 Pillow-7.2.0 PyOpenGL-3.1.5 PySide2-5.14.2.2 PyWavelets-1.1.1 PyYAML-5.3.1 alabaster-0.7.12 appdirs-1.4.4 appnope-0.1.0 asciitree-0.3.3 babel-2.8.0 backcall-0.2.0 cachey-0.2.1 certifi-2020.6.20 chardet-3.0.4 cycler-0.10.0 dask-2.25.0 decorator-4.4.2 docutils-0.16 fasteners-0.15 freetype-py-2.2.0 heapdict-1.0.1 idna-2.10 imageio-2.9.0 imagesize-1.2.0 ipykernel-5.3.4 ipython-genutils-0.2.0 jedi-0.17.2 jupyter-client-6.1.7 jupyter-core-4.6.3 kiwisolver-1.2.0 matplotlib-3.3.1 monotonic-1.5 napari-plugin-engine-0.1.7 napari-svg-0.1.3 networkx-2.5 numcodecs-0.6.4 numpy-1.19.1 numpydoc-1.1.0 packaging-20.4 parso-0.7.1 pexpect-4.8.0 pickleshare-0.7.5 pip-20.2.2 prompt-toolkit-3.0.7 psutil-5.7.2 ptyprocess-0.6.0 pygments-2.6.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 pyzmq-19.0.2 qtconsole-4.7.6 qtpy-1.9.0 requests-2.24.0 scikit-image-0.17.2 scipy-1.5.2 setuptools-50.1.0 shiboken2-5.14.2.2 six-1.15.0 snowballstemmer-2.0.0 sphinx-3.2.1 sphinxcontrib-applehelp-1.0.2 sphinxcontrib-devhelp-1.0.2 sphinxcontrib-htmlhelp-1.0.3 sphinxcontrib-jsmath-1.0.1 sphinxcontrib-qthelp-1.0.3 sphinxcontrib-serializinghtml-1.1.4 tifffile-2020.8.25 toolz-0.10.0 tornado-6.0.4 traitlets-5.0.2 typing-extensions-3.7.4.3 urllib3-1.25.10 vispy-0.6.4 wcwidth-0.2.5 wrapt-1.12.1 zarr-2.4.0
2020-09-03T02:43:47.9231030Z
2020-09-03T02:43:47.9231450Z Unpacking support package...
2020-09-03T02:43:47.9231530Z
2020-09-03T02:43:47.9231660Z [napari] Installing dependencies...
2020-09-03T02:43:47.9231730Z
2020-09-03T02:43:47.9232030Z [napari] Installing application code...
2020-09-03T02:43:47.9233380Z Installing napari...
2020-09-03T02:43:47.9233900Z
2020-09-03T02:43:47.9234310Z [napari] Installing application resources...
2020-09-03T02:43:47.9234600Z Installing resources/icon.icns as application icon...
2020-09-03T02:43:47.9234740Z
2020-09-03T02:43:47.9234960Z [napari] Created macOS/napari
2020-09-03T02:43:49.2999920Z
2020-09-03T02:43:49.3001460Z [napari] Built macOS/napari/napari.app
2020-09-03T02:43:59.3788940Z hdiutil: attach: WARNING: ignoring IDME options (obsolete)
2020-09-03T02:46:30.5785620Z hdiutil: detach: timeout for DiskArbitration expired
2020-09-03T02:46:30.5787360Z hdiutil: detach: drive not detached
2020-09-03T02:48:31.6296690Z hdiutil: detach: timeout for DiskArbitration expired
2020-09-03T02:48:31.6298410Z hdiutil: detach: drive not detached
2020-09-03T02:50:32.8600140Z hdiutil: detach: timeout for DiskArbitration expired
2020-09-03T02:50:32.8601770Z hdiutil: detach: drive not detached
2020-09-03T02:52:33.9782130Z hdiutil: detach: timeout for DiskArbitration expired
2020-09-03T02:52:33.9782560Z hdiutil: detach: drive not detached
2020-09-03T02:54:35.2440930Z hdiutil: detach: timeout for DiskArbitration expired
2020-09-03T02:54:35.2441760Z hdiutil: detach: drive not detached
2020-09-03T02:56:36.4797740Z hdiutil: detach: timeout for DiskArbitration expired
2020-09-03T02:56:36.4798020Z hdiutil: detach: drive not detached
2020-09-03T02:56:36.4811120Z
2020-09-03T02:56:36.4811350Z [napari] Building DMG...
2020-09-03T02:56:36.4823550Z Traceback (most recent call last):
2020-09-03T02:56:36.4826240Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/bin/briefcase", line 8, in <module>
2020-09-03T02:56:36.4832060Z sys.exit(main())
2020-09-03T02:56:36.4833530Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages/briefcase/__main__.py", line 11, in main
2020-09-03T02:56:36.4833740Z command(**options)
2020-09-03T02:56:36.4834410Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages/briefcase/commands/package.py", line 64, in __call__
2020-09-03T02:56:36.4834580Z state = self._package_app(app, update=update, **full_options(state, options))
2020-09-03T02:56:36.4835320Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages/briefcase/commands/package.py", line 41, in _package_app
2020-09-03T02:56:36.4835560Z state = self.package_app(app, **full_options(state, options))
2020-09-03T02:56:36.4836220Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages/briefcase/platforms/macOS/dmg.py", line 126, in package_app
2020-09-03T02:56:36.4836380Z self.dmgbuild.build_dmg(
2020-09-03T02:56:36.4836970Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/site-packages/dmgbuild/core.py", line 561, in build_dmg
2020-09-03T02:56:36.8156560Z ##[error] raise DMGError('Unable to detach device cleanly')
2020-09-03T02:56:36.8164970Z dmgbuild.core.DMGError: Unable to detach device cleanly
2020-09-03T02:56:37.2347440Z Traceback (most recent call last):
2020-09-03T02:56:37.2348770Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/runpy.py", line 194, in _run_module_as_main
2020-09-03T02:56:37.2349390Z return _run_code(code, main_globals, None,
2020-09-03T02:56:37.2350010Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/runpy.py", line 87, in _run_code
2020-09-03T02:56:37.2350320Z exec(code, run_globals)
2020-09-03T02:56:37.2350720Z File "/Users/runner/work/napari/napari/bundle.py", line 178, in <module>
2020-09-03T02:56:37.2352190Z print('created', bundle())
2020-09-03T02:56:37.2352520Z File "/Users/runner/work/napari/napari/bundle.py", line 159, in bundle
2020-09-03T02:56:37.2352760Z subprocess.check_call(cmd)
2020-09-03T02:56:37.2353080Z File "/Users/runner/hostedtoolcache/Python/3.8.5/x64/lib/python3.8/subprocess.py", line 364, in check_call
2020-09-03T02:56:37.2353380Z raise CalledProcessError(retcode, cmd)
2020-09-03T02:56:37.2354370Z subprocess.CalledProcessError: Command '['briefcase', 'package', '--no-sign']' returned non-zero exit status 1.
2020-09-03T02:56:37.2354630Z patched dmgbuild.core
2020-09-03T02:56:37.2354890Z updating pyproject.toml to version: 0.3.7rc3
2020-09-03T02:56:37.2356010Z created site-packages at /Users/runner/work/napari/napari/macOS/napari/napari.app/Contents/Resources/Support/lib/python3.8/site-packages
2020-09-03T02:56:37.2557870Z ##[error]Process completed with exit code 1.
2020-09-03T02:56:37.4868680Z Post job cleanup.
2020-09-03T02:56:38.3289540Z [command]/usr/local/bin/git version
2020-09-03T02:56:38.3406170Z git version 2.28.0
2020-09-03T02:56:38.3465130Z [command]/usr/local/bin/git config --local --name-only --get-regexp core\.sshCommand
2020-09-03T02:56:38.3561340Z [command]/usr/local/bin/git submodule foreach --recursive git config --local --name-only --get-regexp 'core\.sshCommand' && git config --local --unset-all 'core.sshCommand' || :
2020-09-03T02:56:39.0875020Z [command]/usr/local/bin/git config --local --name-only --get-regexp http\.https\:\/\/github\.com\/\.extraheader
2020-09-03T02:56:39.0946780Z http.https://github.com/.extraheader
2020-09-03T02:56:39.0969160Z [command]/usr/local/bin/git config --local --unset-all http.https://github.com/.extraheader
2020-09-03T02:56:39.3212090Z [command]/usr/local/bin/git submodule foreach --recursive git config --local --name-only --get-regexp 'http\.https\:\/\/github\.com\/\.extraheader' && git config --local --unset-all 'http.https://github.com/.extraheader' || :
2020-09-03T02:56:39.5687830Z Cleaning up orphan processes
2020-09-03T02:56:40.0263370Z Terminate orphan process: pid (2567) (diskimages-help)
|
DMGError
|
def _on_data_change(self, event=None):
"""update the display"""
# update the shaders
self.track_shader.current_time = self.layer.current_time
self.graph_shader.current_time = self.layer.current_time
# add text labels if they're visible
if self.node._subvisuals[1].visible:
labels_text, labels_pos = self.layer.track_labels
self.node._subvisuals[1].text = labels_text
self.node._subvisuals[1].pos = labels_pos
self.node.update()
# Call to update order of translation values with new dims:
self._on_matrix_change()
|
def _on_data_change(self, event=None):
"""update the display"""
# update the shaders
self.track_shader.current_time = self.layer.current_time
self.graph_shader.current_time = self.layer.current_time
# add text labels if they're visible
if self.node._subvisuals[1].visible:
labels_text, labels_pos = self.layer.track_labels
self.node._subvisuals[1].text = labels_text
self.node._subvisuals[1].pos = labels_pos
self.node.update()
# Call to update order of translation values with new dims:
self._on_scale_change()
self._on_translate_change()
|
https://github.com/napari/napari/issues/1745
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-10-05f52b983105> in <module>
1 v = napari.Viewer()
----> 2 v.add_tracks(tracks, name='ilastik tracks')
<string> in add_tracks(self, data, properties, graph, tail_width, tail_length, name, metadata, scale, translate, opacity, blending, visible, colormap, color_by, colormaps_dict)
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/components/add_layers_mixin.py in add_layer(self, layer)
48 layer.events.cursor_size.connect(self._update_cursor_size)
49 layer.events.data.connect(self._on_layers_change)
---> 50 self.layers.append(layer)
51 self._update_layers(layers=[layer])
52
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/list/_model.py in append(self, obj)
57 def append(self, obj):
58 TypedList.append(self, obj)
---> 59 self.events.added(item=obj, index=len(self) - 1)
60
61 def pop(self, key):
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in __call__(self, *args, **kwargs)
512 continue
513
--> 514 self._invoke_callback(cb, event)
515 if event.blocked:
516 break
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in _invoke_callback(self, cb, event)
533 self.print_callback_errors,
534 self,
--> 535 cb_event=(cb, event),
536 )
537
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in _invoke_callback(self, cb, event)
527 def _invoke_callback(self, cb, event):
528 try:
--> 529 cb(event)
530 except Exception:
531 _handle_exception(
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_qt/qt_viewer.py in _add_layer(self, event)
281 layers = event.source
282 layer = event.item
--> 283 vispy_layer = create_vispy_visual(layer)
284 vispy_layer.node.parent = self.view.scene
285 vispy_layer.order = len(layers) - 1
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/utils.py in create_vispy_visual(layer)
32 for layer_type, visual in layer_to_visual.items():
33 if isinstance(layer, layer_type):
---> 34 return visual(layer)
35
36 raise TypeError(
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/vispy_tracks_layer.py in __init__(self, layer)
48 self._reset_base()
49
---> 50 self._on_data_change()
51 self._on_appearance_change()
52
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/vispy_tracks_layer.py in _on_data_change(self, event)
66 self.node.update()
67 # Call to update order of translation values with new dims:
---> 68 self._on_scale_change()
69 self._on_translate_change()
70
AttributeError: 'VispyTracksLayer' object has no attribute '_on_scale_change'
|
AttributeError
|
def _on_tracks_change(self, event=None):
"""update the shader when the track data changes"""
self.track_shader.use_fade = self.layer.use_fade
self.track_shader.tail_length = self.layer.tail_length
self.track_shader.vertex_time = self.layer.track_times
# change the data to the vispy line visual
self.node._subvisuals[0].set_data(
pos=self.layer._view_data,
connect=self.layer.track_connex,
width=self.layer.tail_width,
color=self.layer.track_colors,
)
# Call to update order of translation values with new dims:
self._on_matrix_change()
|
def _on_tracks_change(self, event=None):
"""update the shader when the track data changes"""
self.track_shader.use_fade = self.layer.use_fade
self.track_shader.tail_length = self.layer.tail_length
self.track_shader.vertex_time = self.layer.track_times
# change the data to the vispy line visual
self.node._subvisuals[0].set_data(
pos=self.layer._view_data,
connect=self.layer.track_connex,
width=self.layer.tail_width,
color=self.layer.track_colors,
)
# Call to update order of translation values with new dims:
self._on_scale_change()
self._on_translate_change()
|
https://github.com/napari/napari/issues/1745
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-10-05f52b983105> in <module>
1 v = napari.Viewer()
----> 2 v.add_tracks(tracks, name='ilastik tracks')
<string> in add_tracks(self, data, properties, graph, tail_width, tail_length, name, metadata, scale, translate, opacity, blending, visible, colormap, color_by, colormaps_dict)
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/components/add_layers_mixin.py in add_layer(self, layer)
48 layer.events.cursor_size.connect(self._update_cursor_size)
49 layer.events.data.connect(self._on_layers_change)
---> 50 self.layers.append(layer)
51 self._update_layers(layers=[layer])
52
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/list/_model.py in append(self, obj)
57 def append(self, obj):
58 TypedList.append(self, obj)
---> 59 self.events.added(item=obj, index=len(self) - 1)
60
61 def pop(self, key):
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in __call__(self, *args, **kwargs)
512 continue
513
--> 514 self._invoke_callback(cb, event)
515 if event.blocked:
516 break
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in _invoke_callback(self, cb, event)
533 self.print_callback_errors,
534 self,
--> 535 cb_event=(cb, event),
536 )
537
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in _invoke_callback(self, cb, event)
527 def _invoke_callback(self, cb, event):
528 try:
--> 529 cb(event)
530 except Exception:
531 _handle_exception(
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_qt/qt_viewer.py in _add_layer(self, event)
281 layers = event.source
282 layer = event.item
--> 283 vispy_layer = create_vispy_visual(layer)
284 vispy_layer.node.parent = self.view.scene
285 vispy_layer.order = len(layers) - 1
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/utils.py in create_vispy_visual(layer)
32 for layer_type, visual in layer_to_visual.items():
33 if isinstance(layer, layer_type):
---> 34 return visual(layer)
35
36 raise TypeError(
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/vispy_tracks_layer.py in __init__(self, layer)
48 self._reset_base()
49
---> 50 self._on_data_change()
51 self._on_appearance_change()
52
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/vispy_tracks_layer.py in _on_data_change(self, event)
66 self.node.update()
67 # Call to update order of translation values with new dims:
---> 68 self._on_scale_change()
69 self._on_translate_change()
70
AttributeError: 'VispyTracksLayer' object has no attribute '_on_scale_change'
|
AttributeError
|
def _on_graph_change(self, event=None):
"""update the shader when the graph data changes"""
self.graph_shader.use_fade = self.layer.use_fade
self.graph_shader.tail_length = self.layer.tail_length
self.graph_shader.vertex_time = self.layer.graph_times
# if the user clears a graph after it has been created, vispy offers
# no method to clear the data, therefore, we need to set private
# attributes to None to prevent errors
if self.layer._view_graph is None:
self.node._subvisuals[2]._pos = None
self.node._subvisuals[2]._connect = None
self.node.update()
return
self.node._subvisuals[2].set_data(
pos=self.layer._view_graph,
connect=self.layer.graph_connex,
width=self.layer.tail_width,
color="white",
)
# Call to update order of translation values with new dims:
self._on_matrix_change()
|
def _on_graph_change(self, event=None):
"""update the shader when the graph data changes"""
self.graph_shader.use_fade = self.layer.use_fade
self.graph_shader.tail_length = self.layer.tail_length
self.graph_shader.vertex_time = self.layer.graph_times
# if the user clears a graph after it has been created, vispy offers
# no method to clear the data, therefore, we need to set private
# attributes to None to prevent errors
if self.layer._view_graph is None:
self.node._subvisuals[2]._pos = None
self.node._subvisuals[2]._connect = None
self.node.update()
return
self.node._subvisuals[2].set_data(
pos=self.layer._view_graph,
connect=self.layer.graph_connex,
width=self.layer.tail_width,
color="white",
)
# Call to update order of translation values with new dims:
self._on_scale_change()
self._on_translate_change()
|
https://github.com/napari/napari/issues/1745
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-10-05f52b983105> in <module>
1 v = napari.Viewer()
----> 2 v.add_tracks(tracks, name='ilastik tracks')
<string> in add_tracks(self, data, properties, graph, tail_width, tail_length, name, metadata, scale, translate, opacity, blending, visible, colormap, color_by, colormaps_dict)
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/components/add_layers_mixin.py in add_layer(self, layer)
48 layer.events.cursor_size.connect(self._update_cursor_size)
49 layer.events.data.connect(self._on_layers_change)
---> 50 self.layers.append(layer)
51 self._update_layers(layers=[layer])
52
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/list/_model.py in append(self, obj)
57 def append(self, obj):
58 TypedList.append(self, obj)
---> 59 self.events.added(item=obj, index=len(self) - 1)
60
61 def pop(self, key):
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in __call__(self, *args, **kwargs)
512 continue
513
--> 514 self._invoke_callback(cb, event)
515 if event.blocked:
516 break
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in _invoke_callback(self, cb, event)
533 self.print_callback_errors,
534 self,
--> 535 cb_event=(cb, event),
536 )
537
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in _invoke_callback(self, cb, event)
527 def _invoke_callback(self, cb, event):
528 try:
--> 529 cb(event)
530 except Exception:
531 _handle_exception(
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_qt/qt_viewer.py in _add_layer(self, event)
281 layers = event.source
282 layer = event.item
--> 283 vispy_layer = create_vispy_visual(layer)
284 vispy_layer.node.parent = self.view.scene
285 vispy_layer.order = len(layers) - 1
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/utils.py in create_vispy_visual(layer)
32 for layer_type, visual in layer_to_visual.items():
33 if isinstance(layer, layer_type):
---> 34 return visual(layer)
35
36 raise TypeError(
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/vispy_tracks_layer.py in __init__(self, layer)
48 self._reset_base()
49
---> 50 self._on_data_change()
51 self._on_appearance_change()
52
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/vispy_tracks_layer.py in _on_data_change(self, event)
66 self.node.update()
67 # Call to update order of translation values with new dims:
---> 68 self._on_scale_change()
69 self._on_translate_change()
70
AttributeError: 'VispyTracksLayer' object has no attribute '_on_scale_change'
|
AttributeError
|
def __init__(
self,
data,
*,
properties=None,
graph=None,
tail_width=2,
tail_length=30,
name=None,
metadata=None,
scale=None,
translate=None,
rotate=None,
shear=None,
affine=None,
opacity=1,
blending="additive",
visible=True,
colormap="turbo",
color_by="track_id",
colormaps_dict=None,
):
# if not provided with any data, set up an empty layer in 2D+t
if data is None:
data = np.empty((0, 4))
else:
# convert data to a numpy array if it is not already one
data = np.asarray(data)
# in absence of properties make the default an empty dict
if properties is None:
properties = {}
# set the track data dimensions (remove ID from data)
ndim = data.shape[1] - 1
super().__init__(
data,
ndim,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
rotate=rotate,
shear=shear,
affine=affine,
opacity=opacity,
blending=blending,
visible=visible,
)
self.events.add(
tail_width=Event,
tail_length=Event,
display_id=Event,
display_tail=Event,
display_graph=Event,
color_by=Event,
colormap=Event,
properties=Event,
rebuild_tracks=Event,
rebuild_graph=Event,
)
# track manager deals with data slicing, graph building and properties
self._manager = TrackManager()
self._track_colors = None
self._colormaps_dict = colormaps_dict or {} # additional colormaps
self._color_by = color_by # default color by ID
self._colormap = colormap
# use this to update shaders when the displayed dims change
self._current_displayed_dims = None
# track display properties
self.tail_width = tail_width
self.tail_length = tail_length
self.display_id = False
self.display_tail = True
self.display_graph = True
# set the data, properties and graph
self.data = data
self.properties = properties
self.graph = graph or {}
self.color_by = color_by
self.colormap = colormap
self._update_dims()
# reset the display before returning
self._current_displayed_dims = None
|
def __init__(
self,
data,
*,
properties=None,
graph=None,
tail_width=2,
tail_length=30,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=1,
blending="additive",
visible=True,
colormap="turbo",
color_by="track_id",
colormaps_dict=None,
):
# if not provided with any data, set up an empty layer in 2D+t
if data is None:
data = np.empty((0, 4))
else:
# convert data to a numpy array if it is not already one
data = np.asarray(data)
# in absence of properties make the default an empty dict
if properties is None:
properties = {}
# set the track data dimensions (remove ID from data)
ndim = data.shape[1] - 1
super().__init__(
data,
ndim,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
opacity=opacity,
blending=blending,
visible=visible,
)
self.events.add(
tail_width=Event,
tail_length=Event,
display_id=Event,
display_tail=Event,
display_graph=Event,
color_by=Event,
colormap=Event,
properties=Event,
rebuild_tracks=Event,
rebuild_graph=Event,
)
# track manager deals with data slicing, graph building and properties
self._manager = TrackManager()
self._track_colors = None
self._colormaps_dict = colormaps_dict or {} # additional colormaps
self._color_by = color_by # default color by ID
self._colormap = colormap
# use this to update shaders when the displayed dims change
self._current_displayed_dims = None
# track display properties
self.tail_width = tail_width
self.tail_length = tail_length
self.display_id = False
self.display_tail = True
self.display_graph = True
# set the data, properties and graph
self.data = data
self.properties = properties
self.graph = graph or {}
self.color_by = color_by
self.colormap = colormap
self._update_dims()
# reset the display before returning
self._current_displayed_dims = None
|
https://github.com/napari/napari/issues/1745
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-10-05f52b983105> in <module>
1 v = napari.Viewer()
----> 2 v.add_tracks(tracks, name='ilastik tracks')
<string> in add_tracks(self, data, properties, graph, tail_width, tail_length, name, metadata, scale, translate, opacity, blending, visible, colormap, color_by, colormaps_dict)
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/components/add_layers_mixin.py in add_layer(self, layer)
48 layer.events.cursor_size.connect(self._update_cursor_size)
49 layer.events.data.connect(self._on_layers_change)
---> 50 self.layers.append(layer)
51 self._update_layers(layers=[layer])
52
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/list/_model.py in append(self, obj)
57 def append(self, obj):
58 TypedList.append(self, obj)
---> 59 self.events.added(item=obj, index=len(self) - 1)
60
61 def pop(self, key):
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in __call__(self, *args, **kwargs)
512 continue
513
--> 514 self._invoke_callback(cb, event)
515 if event.blocked:
516 break
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in _invoke_callback(self, cb, event)
533 self.print_callback_errors,
534 self,
--> 535 cb_event=(cb, event),
536 )
537
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in _invoke_callback(self, cb, event)
527 def _invoke_callback(self, cb, event):
528 try:
--> 529 cb(event)
530 except Exception:
531 _handle_exception(
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_qt/qt_viewer.py in _add_layer(self, event)
281 layers = event.source
282 layer = event.item
--> 283 vispy_layer = create_vispy_visual(layer)
284 vispy_layer.node.parent = self.view.scene
285 vispy_layer.order = len(layers) - 1
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/utils.py in create_vispy_visual(layer)
32 for layer_type, visual in layer_to_visual.items():
33 if isinstance(layer, layer_type):
---> 34 return visual(layer)
35
36 raise TypeError(
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/vispy_tracks_layer.py in __init__(self, layer)
48 self._reset_base()
49
---> 50 self._on_data_change()
51 self._on_appearance_change()
52
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/vispy_tracks_layer.py in _on_data_change(self, event)
66 self.node.update()
67 # Call to update order of translation values with new dims:
---> 68 self._on_scale_change()
69 self._on_translate_change()
70
AttributeError: 'VispyTracksLayer' object has no attribute '_on_scale_change'
|
AttributeError
|
def _get_state(self):
"""Get dictionary of layer state.
Returns
-------
state : dict
Dictionary of layer state.
"""
state = self._get_base_state()
state.update(
{
"data": self.data,
"properties": self.properties,
"graph": self.graph,
"color_by": self.color_by,
"colormap": self.colormap,
"colormaps_dict": self.colormaps_dict,
"tail_width": self.tail_width,
"tail_length": self.tail_length,
}
)
return state
|
def _get_state(self):
"""Get dictionary of layer state.
Returns
-------
state : dict
Dictionary of layer state.
"""
state = self._get_base_state()
state.update(
{
"data": self.data,
"properties": self.properties,
"graph": self.graph,
"display_id": self.display_id,
"display_tail": self.display_tail,
"display_graph": self.display_graph,
"color_by": self.color_by,
"colormap": self.colormap,
"tail_width": self.tail_width,
"tail_length": self.tail_length,
}
)
return state
|
https://github.com/napari/napari/issues/1745
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-10-05f52b983105> in <module>
1 v = napari.Viewer()
----> 2 v.add_tracks(tracks, name='ilastik tracks')
<string> in add_tracks(self, data, properties, graph, tail_width, tail_length, name, metadata, scale, translate, opacity, blending, visible, colormap, color_by, colormaps_dict)
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/components/add_layers_mixin.py in add_layer(self, layer)
48 layer.events.cursor_size.connect(self._update_cursor_size)
49 layer.events.data.connect(self._on_layers_change)
---> 50 self.layers.append(layer)
51 self._update_layers(layers=[layer])
52
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/list/_model.py in append(self, obj)
57 def append(self, obj):
58 TypedList.append(self, obj)
---> 59 self.events.added(item=obj, index=len(self) - 1)
60
61 def pop(self, key):
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in __call__(self, *args, **kwargs)
512 continue
513
--> 514 self._invoke_callback(cb, event)
515 if event.blocked:
516 break
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in _invoke_callback(self, cb, event)
533 self.print_callback_errors,
534 self,
--> 535 cb_event=(cb, event),
536 )
537
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/utils/events/event.py in _invoke_callback(self, cb, event)
527 def _invoke_callback(self, cb, event):
528 try:
--> 529 cb(event)
530 except Exception:
531 _handle_exception(
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_qt/qt_viewer.py in _add_layer(self, event)
281 layers = event.source
282 layer = event.item
--> 283 vispy_layer = create_vispy_visual(layer)
284 vispy_layer.node.parent = self.view.scene
285 vispy_layer.order = len(layers) - 1
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/utils.py in create_vispy_visual(layer)
32 for layer_type, visual in layer_to_visual.items():
33 if isinstance(layer, layer_type):
---> 34 return visual(layer)
35
36 raise TypeError(
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/vispy_tracks_layer.py in __init__(self, layer)
48 self._reset_base()
49
---> 50 self._on_data_change()
51 self._on_appearance_change()
52
~/miniconda3/envs/fastai/lib/python3.7/site-packages/napari/_vispy/vispy_tracks_layer.py in _on_data_change(self, event)
66 self.node.update()
67 # Call to update order of translation values with new dims:
---> 68 self._on_scale_change()
69 self._on_translate_change()
70
AttributeError: 'VispyTracksLayer' object has no attribute '_on_scale_change'
|
AttributeError
|
def _update_draw(self, scale_factors, corner_pixels, shape_threshold):
"""Update canvas scale and corner values on draw.
For layer multiscale determing if a new resolution level or tile is
required.
Parameters
----------
scale_factors : list
Scale factors going from canvas to world coordinates.
corner_pixels : array
Coordinates of the top-left and bottom-right canvas pixels in the
world coordinates.
shape_threshold : tuple
Requested shape of field of view in data coordinates.
"""
# Note we ignore the first transform which is tile2data
scale = np.divide(scale_factors, self._transforms[1:].simplified.scale)
data_corners = self._transforms[1:].simplified.inverse(corner_pixels)
self.scale_factor = np.linalg.norm(scale) / np.linalg.norm([1, 1])
# Round and clip data corners
data_corners = np.array(
[np.floor(data_corners[0]), np.ceil(data_corners[1])]
).astype(int)
data_corners = np.clip(data_corners, self._extent_data[0], self._extent_data[1])
if self.dims.ndisplay == 2 and self.multiscale:
level, displayed_corners = compute_multiscale_level_and_corners(
data_corners[:, self.dims.displayed],
shape_threshold,
self.downsample_factors[:, self.dims.displayed],
)
corners = np.zeros((2, self.ndim))
corners[:, self.dims.displayed] = displayed_corners
corners = corners.astype(int)
if self.data_level != level or not np.all(self.corner_pixels == corners):
self._data_level = level
self.corner_pixels = corners
self.refresh()
else:
self.corner_pixels = data_corners
|
def _update_draw(self, scale_factors, corner_pixels, shape_threshold):
"""Update canvas scale and corner values on draw.
For layer multiscale determing if a new resolution level or tile is
required.
Parameters
----------
scale_factors : list
Scale factors going from canvas to world coordinates.
corner_pixels : array
Coordinates of the top-left and bottom-right canvas pixels in the
world coordinates.
shape_threshold : tuple
Requested shape of field of view in data coordinates.
"""
# Note we ignore the first transform which is tile2data
scale = np.divide(scale_factors, self._transforms[1:].simplified.scale)
data_corners = self._transforms[1:].simplified.inverse(corner_pixels)
self.scale_factor = np.linalg.norm(scale) / np.linalg.norm([1, 1])
# Round and clip data corners
data_corners = np.array(
[np.floor(data_corners[0]), np.ceil(data_corners[1])]
).astype(int)
data_corners = np.clip(data_corners, self._extent_data[0], self._extent_data[1])
if self.dims.ndisplay == 2 and self.multiscale:
level, corners = compute_multiscale_level_and_corners(
data_corners,
shape_threshold,
self.downsample_factors,
)
if self.data_level != level or not np.all(self.corner_pixels == corners):
self._data_level = level
self.corner_pixels = corners
self.refresh()
else:
self.corner_pixels = data_corners
|
https://github.com/napari/napari/issues/1717
|
Traceback (most recent call last):
File "/home/jni/miniconda3/envs/all/lib/python3.8/site-packages/vispy/app/backends/_qt.py", line 825, in paintGL
self._vispy_canvas.events.draw(region=None)
File "/home/jni/miniconda3/envs/all/lib/python3.8/site-packages/vispy/util/event.py", line 455, in __call__
self._invoke_callback(cb, event)
File "/home/jni/miniconda3/envs/all/lib/python3.8/site-packages/vispy/util/event.py", line 473, in _invoke_callback
_handle_exception(self.ignore_callback_errors,
File "/home/jni/miniconda3/envs/all/lib/python3.8/site-packages/vispy/util/event.py", line 471, in _invoke_callback
cb(event)
File "/home/jni/projects/napari/napari/_qt/qt_viewer.py", line 673, in on_draw
layer._update_draw(
File "/home/jni/projects/napari/napari/layers/base/base.py", line 771, in _update_draw
level, corners = compute_multiscale_level_and_corners(
File "/home/jni/projects/napari/napari/layers/utils/layer_utils.py", line 268, in compute_multiscale_level_and_corners
level = compute_multiscale_level(
File "/home/jni/projects/napari/napari/layers/utils/layer_utils.py", line 231, in compute_multiscale_level
locations = np.argwhere(np.all(scaled_shape > shape_threshold, axis=1))
ValueError: operands could not be broadcast together with shapes (2,4) (2,)
|
ValueError
|
def __init__(
self,
data,
*,
properties=None,
graph=None,
tail_width=2,
tail_length=30,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=1,
blending="additive",
visible=True,
colormap="turbo",
color_by="track_id",
colormaps_dict=None,
):
# if not provided with any data, set up an empty layer in 2D+t
if data is None:
data = np.empty((0, 4))
else:
# convert data to a numpy array if it is not already one
data = np.asarray(data)
# in absence of properties make the default an empty dict
if properties is None:
properties = {}
# set the track data dimensions (remove ID from data)
ndim = data.shape[1] - 1
super().__init__(
data,
ndim,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
opacity=opacity,
blending=blending,
visible=visible,
)
self.events.add(
tail_width=Event,
tail_length=Event,
display_id=Event,
display_tail=Event,
display_graph=Event,
color_by=Event,
colormap=Event,
properties=Event,
rebuild_tracks=Event,
rebuild_graph=Event,
)
# track manager deals with data slicing, graph building and properties
self._manager = TrackManager()
self._track_colors = None
self._colormaps_dict = colormaps_dict or {} # additional colormaps
self._color_by = color_by # default color by ID
self._colormap = colormap
# use this to update shaders when the displayed dims change
self._current_displayed_dims = None
# track display properties
self.tail_width = tail_width
self.tail_length = tail_length
self.display_id = False
self.display_tail = True
self.display_graph = True
# set the data, properties and graph
self.data = data
self.properties = properties
self.graph = graph or {}
self.color_by = color_by
self.colormap = colormap
self._update_dims()
# reset the display before returning
self._current_displayed_dims = None
|
def __init__(
self,
data,
*,
properties=None,
graph=None,
tail_width=2,
tail_length=30,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=1,
blending="additive",
visible=True,
colormap="turbo",
color_by="track_id",
colormaps_dict=None,
):
# if not provided with any data, set up an empty layer in 2D+t
if data is None:
data = np.empty((0, 4))
else:
# convert data to a numpy array if it is not already one
data = np.asarray(data)
# set the track data dimensions (remove ID from data)
ndim = data.shape[1] - 1
super().__init__(
data,
ndim,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
opacity=opacity,
blending=blending,
visible=visible,
)
self.events.add(
tail_width=Event,
tail_length=Event,
display_id=Event,
display_tail=Event,
display_graph=Event,
color_by=Event,
colormap=Event,
properties=Event,
rebuild_tracks=Event,
rebuild_graph=Event,
)
# track manager deals with data slicing, graph building and properties
self._manager = TrackManager()
self._track_colors = None
self._colormaps_dict = colormaps_dict or {} # additional colormaps
self._color_by = color_by # default color by ID
self._colormap = colormap
# use this to update shaders when the displayed dims change
self._current_displayed_dims = None
# track display properties
self.tail_width = tail_width
self.tail_length = tail_length
self.display_id = False
self.display_tail = True
self.display_graph = True
# set the data, properties and graph
self.data = data
self.properties = properties or {}
self.graph = graph or {}
self.color_by = color_by
self.colormap = colormap
self._update_dims()
# reset the display before returning
self._current_displayed_dims = None
|
https://github.com/napari/napari/issues/1706
|
Traceback (most recent call last):
File "/home/quantumjot/napari/napari/_qt/event_loop.py", line 79, in gui_qt
yield app
File "<ipython-input-15-fdca00748db9>", line 4, in <module>
viewer.add_tracks(data, properties=properties)
File "/home/quantumjot/napari/napari/components/add_layers_mixin.py", line 854, in add_tracks
layer = layers.Tracks(
File "/home/quantumjot/napari/napari/layers/tracks/tracks.py", line 144, in __init__
self.properties = properties or {}
File "/home/quantumjot/anaconda3/envs/trackpy/lib/python3.8/site-packages/pandas/core/generic.py", line 1326, in __nonzero__
raise ValueError(
ValueError: The truth value of a DataFrame is ambiguous. Use a.empty, a.bool(), a.item(), a.any() or a.all().
|
ValueError
|
def configure_dask(data) -> Callable[[], ContextManager[dict]]:
"""Spin up cache and return context manager that optimizes Dask indexing.
This function determines whether data is a dask array or list of dask
arrays and prepares some optimizations if so.
When a delayed dask array is given to napari, there are couple things that
need to be done to optimize performance.
1. Opportunistic caching needs to be enabled, such that we don't recompute
(or "re-read") data that has already been computed or read.
2. Dask task fusion must be turned off to prevent napari from triggering
new io on data that has already been read from disk. For example, with a
4D timelapse of 3D stacks, napari may actually *re-read* the entire 3D
tiff file every time the Z plane index is changed. Turning of Dask task
fusion with ``optimization.fuse.active == False`` prevents this.
.. note::
Turning off task fusion requires Dask version 2.15.0 or later.
For background and context, see `napari/napari#718
<https://github.com/napari/napari/issues/718>`_, `napari/napari#1124
<https://github.com/napari/napari/pull/1124>`_, and `dask/dask#6084
<https://github.com/dask/dask/pull/6084>`_.
For details on Dask task fusion, see the documentation on `Dask
Optimization <https://docs.dask.org/en/latest/optimize.html>`_.
Parameters
----------
data : Any
data, as passed to a ``Layer.__init__`` method.
Returns
-------
ContextManager
A context manager that can be used to optimize dask indexing
Examples
--------
>>> data = dask.array.ones((10,10,10))
>>> optimized_slicing = configure_dask(data)
>>> with optimized_slicing():
... data[0, 2].compute()
"""
if _is_dask_data(data):
create_dask_cache() # creates one if it doesn't exist
if dask.__version__ < LooseVersion("2.15.0"):
warnings.warn(
"For best performance with Dask arrays in napari, please "
"upgrade Dask to v2.15.0 or later. Current version is "
f"{dask.__version__}"
)
def dask_optimized_slicing():
with dask.config.set({"optimization.fuse.active": False}) as cfg:
yield cfg
else:
def dask_optimized_slicing():
yield {}
return contextmanager(dask_optimized_slicing)
|
def configure_dask(data) -> Callable[[], ContextManager[dict]]:
"""Spin up cache and return context manager that optimizes Dask indexing.
This function determines whether data is a dask array or list of dask
arrays and prepares some optimizations if so.
When a delayed dask array is given to napari, there are couple things that
need to be done to optimize performance.
1. Opportunistic caching needs to be enabled, such that we don't recompute
(or "re-read") data that has already been computed or read.
2. Dask task fusion must be turned off to prevent napari from triggering
new io on data that has already been read from disk. For example, with a
4D timelapse of 3D stacks, napari may actually *re-read* the entire 3D
tiff file every time the Z plane index is changed. Turning of Dask task
fusion with ``optimization.fuse.active == False`` prevents this.
.. note::
Turning off task fusion requires Dask version 2.15.0 or later.
For background and context, see `napari/napari#718
<https://github.com/napari/napari/issues/718>`_, `napari/napari#1124
<https://github.com/napari/napari/pull/1124>`_, and `dask/dask#6084
<https://github.com/dask/dask/pull/6084>`_.
For details on Dask task fusion, see the documentation on `Dask
Optimization <https://docs.dask.org/en/latest/optimize.html>`_.
Parameters
----------
data : Any
data, as passed to a ``Layer.__init__`` method.
Returns
-------
ContextManager
A context manager that can be used to optimize dask indexing
Examples
--------
>>> data = dask.array.ones((10,10,10))
>>> optimized_slicing = configure_dask(data)
>>> with optimized_slicing():
... data[0, 2].compute()
"""
if _is_dask_data(data):
create_dask_cache() # creates one if it doesn't exist
dask_version = tuple(map(int, dask.__version__.split(".")))
if dask_version < (2, 15, 0):
warnings.warn(
"For best performance with Dask arrays in napari, please "
"upgrade Dask to v2.15.0 or later. Current version is "
f"{dask.__version__}"
)
def dask_optimized_slicing():
with dask.config.set({"optimization.fuse.active": False}) as cfg:
yield cfg
else:
def dask_optimized_slicing():
yield {}
return contextmanager(dask_optimized_slicing)
|
https://github.com/napari/napari/issues/1670
|
~/miniconda/lib/python3.8/site-packages/napari/utils/dask_utils.py in configure_dask(data)
168 if _is_dask_data(data):
169 create_dask_cache() # creates one if it doesn't exist
--> 170 dask_version = tuple(map(int, dask.__version__.split(".")))
171 if dask_version < (2, 15, 0):
172 warnings.warn(
ValueError: invalid literal for int() with base 10: '0+8'
|
ValueError
|
def mouseMoveEvent(self, event):
"""Drag and drop layer with mouse movement.
Parameters
----------
event : qtpy.QtCore.QEvent
Event from the Qt context.
"""
position = np.array([event.pos().x(), event.pos().y()])
distance = np.linalg.norm(position - self._drag_start_position)
if distance < QApplication.startDragDistance() or self._drag_name is None:
return
mimeData = QMimeData()
mimeData.setText(self._drag_name)
drag = QDrag(self)
drag.setMimeData(mimeData)
drag.setHotSpot(event.pos() - self.rect().topLeft())
drag.exec_()
# Check if dragged layer still exists or was deleted during drag
names = [layer.name for layer in self.layers]
dragged_layer_exists = self._drag_name in names
if self._drag_name is not None and dragged_layer_exists:
index = self.layers.index(self._drag_name)
layer = self.layers[index]
self._ensure_visible(layer)
|
def mouseMoveEvent(self, event):
"""Drag and drop layer with mouse movement.
Parameters
----------
event : qtpy.QtCore.QEvent
Event from the Qt context.
"""
position = np.array([event.pos().x(), event.pos().y()])
distance = np.linalg.norm(position - self._drag_start_position)
if distance < QApplication.startDragDistance() or self._drag_name is None:
return
mimeData = QMimeData()
mimeData.setText(self._drag_name)
drag = QDrag(self)
drag.setMimeData(mimeData)
drag.setHotSpot(event.pos() - self.rect().topLeft())
drag.exec_()
if self._drag_name is not None:
index = self.layers.index(self._drag_name)
layer = self.layers[index]
self._ensure_visible(layer)
|
https://github.com/napari/napari/issues/1484
|
Traceback (most recent call last):
File "c:\users\hectormz\code\contribute\napari\napari\_qt\qt_layerlist.py", line 307, in mouseMoveEvent
index = self.layers.index(self._drag_name)
File "c:\users\hectormz\code\contribute\napari\napari\utils\list\_typed.py", line 82, in index
raise KeyError(f'could not find element {q} was referencing')
KeyError: 'could not find element stack was referencing'
|
KeyError
|
def __init__(self, title="napari", ndisplay=2, order=None, axis_labels=None):
super().__init__()
self.events = EmitterGroup(
source=self,
auto_connect=True,
status=Event,
help=Event,
title=Event,
interactive=Event,
cursor=Event,
reset_view=Event,
active_layer=Event,
palette=Event,
grid=Event,
layers_change=Event,
)
self.dims = Dims(ndim=None, ndisplay=ndisplay, order=order, axis_labels=axis_labels)
self.layers = LayerList()
self._status = "Ready"
self._help = ""
self._title = title
self._cursor = "standard"
self._cursor_size = None
self._interactive = True
self._active_layer = None
self._grid_size = (1, 1)
self.grid_stride = 1
self._palette = None
self.theme = "dark"
self.dims.events.camera.connect(self.reset_view)
self.dims.events.ndisplay.connect(self._update_layers)
self.dims.events.order.connect(self._update_layers)
self.dims.events.axis.connect(self._update_layers)
self.layers.events.changed.connect(self._update_active_layer)
self.layers.events.changed.connect(self._update_grid)
self.layers.events.changed.connect(self._on_layers_change)
self.keymap_providers = [self]
# Hold callbacks for when mouse moves with nothing pressed
self.mouse_move_callbacks = []
# Hold callbacks for when mouse is pressed, dragged, and released
self.mouse_drag_callbacks = []
self._persisted_mouse_event = {}
self._mouse_drag_gen = {}
|
def __init__(self, title="napari", ndisplay=2, order=None, axis_labels=None):
super().__init__()
self.events = EmitterGroup(
source=self,
auto_connect=True,
status=Event,
help=Event,
title=Event,
interactive=Event,
cursor=Event,
reset_view=Event,
active_layer=Event,
palette=Event,
grid=Event,
layers_change=Event,
)
self.dims = Dims(ndim=None, ndisplay=ndisplay, order=order, axis_labels=axis_labels)
self.layers = LayerList()
self._status = "Ready"
self._help = ""
self._title = title
self._cursor = "standard"
self._cursor_size = None
self._interactive = True
self._active_layer = None
self._grid_size = (1, 1)
self.grid_stride = 1
self._palette = None
self.theme = "dark"
self.dims.events.camera.connect(self.reset_view)
self.dims.events.ndisplay.connect(self._update_layers)
self.dims.events.order.connect(self._update_layers)
self.dims.events.axis.connect(self._update_layers)
self.layers.events.changed.connect(self._on_layers_change)
self.layers.events.changed.connect(self._update_active_layer)
self.layers.events.changed.connect(self._update_grid)
self.keymap_providers = [self]
# Hold callbacks for when mouse moves with nothing pressed
self.mouse_move_callbacks = []
# Hold callbacks for when mouse is pressed, dragged, and released
self.mouse_drag_callbacks = []
self._persisted_mouse_event = {}
self._mouse_drag_gen = {}
|
https://github.com/napari/napari/issues/1323
|
Traceback (most recent call last):
File "/Users/cjw/Code/napari/napari/_qt/qt_viewer_buttons.py", line 148, in <lambda>
self.clicked.connect(lambda: self.viewer.layers.remove_selected())
File "/Users/cjw/Code/napari/napari/components/layerlist.py", line 139, in remove_selected
self.pop(i)
File "/Users/cjw/Code/napari/napari/utils/list/_model.py", line 64, in pop
self.events.removed(item=obj, index=key)
File "/Users/cjw/Code/napari/napari/utils/event.py", line 508, in __call__
self._invoke_callback(cb, event)
File "/Users/cjw/Code/napari/napari/utils/event.py", line 529, in _invoke_callback
cb_event=(cb, event),
File "/Users/cjw/Code/napari/napari/utils/event.py", line 523, in _invoke_callback
cb(event)
File "/Users/cjw/Code/napari/napari/utils/event.py", line 508, in __call__
self._invoke_callback(cb, event)
File "/Users/cjw/Code/napari/napari/utils/event.py", line 529, in _invoke_callback
cb_event=(cb, event),
File "/Users/cjw/Code/napari/napari/utils/event.py", line 523, in _invoke_callback
cb(event)
File "/Users/cjw/Code/napari/napari/components/viewer_model.py", line 513, in _update_grid
stride=self.grid_stride,
File "/Users/cjw/Code/napari/napari/components/viewer_model.py", line 500, in grid_view
self._subplot(layer, (i_row, i_column))
File "/Users/cjw/Code/napari/napari/components/viewer_model.py", line 528, in _subplot
scene_size, corner = self._scene_shape()
File "/Users/cjw/Code/napari/napari/components/viewer_model.py", line 266, in _scene_shape
size = [size[i] for i in self.dims.displayed]
File "/Users/cjw/Code/napari/napari/components/viewer_model.py", line 266, in <listcomp>
size = [size[i] for i in self.dims.displayed]
IndexError: index 4 is out of bounds for axis 0 with size 4
|
IndexError
|
def select(layer, event):
"""Select shapes or vertices either in select or direct select mode.
Once selected shapes can be moved or resized, and vertices can be moved
depending on the mode. Holding shift when resizing a shape will preserve
the aspect ratio.
"""
shift = "Shift" in event.modifiers
# on press
layer._moving_value = copy(layer._value)
shape_under_cursor, vertex_under_cursor = layer._value
if vertex_under_cursor is None:
if shift and shape_under_cursor is not None:
if shape_under_cursor in layer.selected_data:
layer.selected_data.remove(shape_under_cursor)
shapes = layer.selected_data
layer._selected_box = layer.interaction_box(shapes)
else:
layer.selected_data.add(shape_under_cursor)
shapes = layer.selected_data
layer._selected_box = layer.interaction_box(shapes)
elif shape_under_cursor is not None:
if shape_under_cursor not in layer.selected_data:
layer.selected_data = {shape_under_cursor}
else:
layer.selected_data = set()
layer._set_highlight()
yield
# on move
while event.type == "mouse_move":
# Drag any selected shapes
layer._move(layer.displayed_coordinates)
yield
# on release
shift = "Shift" in event.modifiers
if not layer._is_moving and not layer._is_selecting and not shift:
if shape_under_cursor is not None:
layer.selected_data = {shape_under_cursor}
else:
layer.selected_data = set()
elif layer._is_selecting:
layer.selected_data = layer._data_view.shapes_in_box(layer._drag_box)
layer._is_selecting = False
layer._set_highlight()
layer._is_moving = False
layer._drag_start = None
layer._drag_box = None
layer._fixed_vertex = None
layer._moving_value = (None, None)
layer._set_highlight()
layer._update_thumbnail()
|
def select(layer, event):
"""Select shapes or vertices either in select or direct select mode.
Once selected shapes can be moved or resized, and vertices can be moved
depending on the mode. Holding shift when resizing a shape will preserve
the aspect ratio.
"""
shift = "Shift" in event.modifiers
# on press
layer._moving_value = copy(layer._value)
shape_under_cursor, vertex_under_cursor = layer._value
if vertex_under_cursor is None:
if shift and shape_under_cursor is not None:
if shape_under_cursor in layer.selected_data:
layer.selected_data.remove(shape_under_cursor)
shapes = layer.selected_data
layer._selected_box = layer.interaction_box(shapes)
else:
layer.selected_data.append(shape_under_cursor)
shapes = layer.selected_data
layer._selected_box = layer.interaction_box(shapes)
elif shape_under_cursor is not None:
if shape_under_cursor not in layer.selected_data:
layer.selected_data = {shape_under_cursor}
else:
layer.selected_data = set()
layer._set_highlight()
yield
# on move
while event.type == "mouse_move":
# Drag any selected shapes
layer._move(layer.displayed_coordinates)
yield
# on release
shift = "Shift" in event.modifiers
if not layer._is_moving and not layer._is_selecting and not shift:
if shape_under_cursor is not None:
layer.selected_data = {shape_under_cursor}
else:
layer.selected_data = set()
elif layer._is_selecting:
layer.selected_data = layer._data_view.shapes_in_box(layer._drag_box)
layer._is_selecting = False
layer._set_highlight()
layer._is_moving = False
layer._drag_start = None
layer._drag_box = None
layer._fixed_vertex = None
layer._moving_value = (None, None)
layer._set_highlight()
layer._update_thumbnail()
|
https://github.com/napari/napari/issues/1285
|
Traceback (most recent call last):
File "/Users/yamauc0000/Documents/napari/.venv/lib/python3.7/site-packages/vispy/app/backends/_qt.py", line 447, in mousePressEvent
modifiers=self._modifiers(ev),
File "/Users/yamauc0000/Documents/napari/.venv/lib/python3.7/site-packages/vispy/app/base.py", line 181, in _vispy_mouse_press
ev = self._vispy_canvas.events.mouse_press(**kwargs)
File "/Users/yamauc0000/Documents/napari/.venv/lib/python3.7/site-packages/vispy/util/event.py", line 455, in __call__
self._invoke_callback(cb, event)
File "/Users/yamauc0000/Documents/napari/.venv/lib/python3.7/site-packages/vispy/util/event.py", line 475, in _invoke_callback
self, cb_event=(cb, event))
File "/Users/yamauc0000/Documents/napari/.venv/lib/python3.7/site-packages/vispy/util/event.py", line 471, in _invoke_callback
cb(event)
File "/Users/yamauc0000/Documents/napari/napari/_qt/qt_viewer.py", line 501, in on_mouse_press
mouse_press_callbacks(layer, event)
File "/Users/yamauc0000/Documents/napari/napari/utils/interactions.py", line 57, in mouse_press_callbacks
next(gen)
File "/Users/yamauc0000/Documents/napari/napari/layers/shapes/_shapes_mouse_bindings.py", line 31, in select
layer.selected_data.append(shape_under_cursor)
AttributeError: 'set' object has no attribute 'append'
|
AttributeError
|
def create_worker(
func: Callable,
*args,
_start_thread: Optional[bool] = None,
_connect: Optional[Dict[str, Union[Callable, Sequence[Callable]]]] = None,
_worker_class: Optional[Type[WorkerBase]] = None,
_ignore_errors: bool = False,
**kwargs,
) -> WorkerBase:
"""Convenience function to start a function in another thread.
By default, uses :class:`Worker`, but a custom ``WorkerBase`` subclass may
be provided. If so, it must be a subclass of :class:`Worker`, which
defines a standard set of signals and a run method.
Parameters
----------
func : Callable
The function to call in another thread.
_start_thread : bool, optional
Whether to immediaetly start the thread. If False, the returned worker
must be manually started with ``worker.start()``. by default it will be
``False`` if the ``_connect`` argument is ``None``, otherwise ``True``.
_connect : Dict[str, Union[Callable, Sequence]], optional
A mapping of ``"signal_name"`` -> ``callable`` or list of ``callable``:
callback functions to connect to the various signals offered by the
worker class. by default None
_worker_class : Type[WorkerBase], optional
The :class`WorkerBase` to instantiate, by default
:class:`FunctionWorker` will be used if ``func`` is a regular function,
and :class:`GeneratorWorker` will be used if it is a generator.
_ignore_errors : bool, optional
If ``False`` (the default), errors raised in the other thread will be
reraised in the main thread (makes debugging significantly easier).
*args
will be passed to ``func``
**kwargs
will be passed to ``func``
Returns
-------
worker : WorkerBase
An instantiated worker. If ``_start_thread`` was ``False``, the worker
will have a `.start()` method that can be used to start the thread.
Raises
------
TypeError
If a worker_class is provided that is not a subclass of WorkerBase.
TypeError
If _connect is provided and is not a dict of ``{str: callable}``
Examples
--------
.. code-block:: python
def long_function(duration):
import time
time.sleep(duration)
worker = create_worker(long_function, 10)
"""
if not _worker_class:
if inspect.isgeneratorfunction(func):
_worker_class = GeneratorWorker
else:
_worker_class = FunctionWorker
if not (inspect.isclass(_worker_class) and issubclass(_worker_class, WorkerBase)):
raise TypeError(f"Worker {_worker_class} must be a subclass of WorkerBase")
worker = _worker_class(func, *args, **kwargs)
if _connect is not None:
if not isinstance(_connect, dict):
raise TypeError("The '_connect' argument must be a dict")
if _start_thread is None:
_start_thread = True
for key, val in _connect.items():
_val = val if isinstance(val, (tuple, list)) else [val]
for v in _val:
if not callable(v):
raise TypeError(
f'"_connect[{key!r}]" must be a function or '
"sequence of functions"
)
getattr(worker, key).connect(v)
# if the user has not provided a default connection for the "errored"
# signal... and they have not explicitly set ``ignore_errors=True``
# Then rereaise any errors from the thread.
if not _ignore_errors and not (_connect or {}).get("errored", False):
def reraise(e):
raise e
worker.errored.connect(reraise)
if _start_thread:
worker.start()
return worker
|
def create_worker(
func: Callable,
*args,
_start_thread: Optional[bool] = None,
_connect: Optional[Dict[str, Union[Callable, Sequence[Callable]]]] = None,
_worker_class: Optional[Type[WorkerBase]] = None,
_ignore_errors: bool = False,
**kwargs,
) -> WorkerBase:
"""Convenience function to start a function in another thread.
By default, uses :class:`Worker`, but a custom ``WorkerBase`` subclass may
be provided. If so, it must be a subclass of :class:`Worker`, which
defines a standard set of signals and a run method.
Parameters
----------
func : Callable
The function to call in another thread.
_start_thread : bool, optional
Whether to immediaetly start the thread. If False, the returned worker
must be manually started with ``worker.start()``. by default it will be
``False`` if the ``_connect`` argument is ``None``, otherwise ``True``.
_connect : Dict[str, Union[Callable, Sequence]], optional
A mapping of ``"signal_name"`` -> ``callable`` or list of ``callable``:
callback functions to connect to the various signals offered by the
worker class. by default None
_worker_class : Type[WorkerBase], optional
The :class`WorkerBase` to instantiate, by default
:class:`FunctionWorker` will be used if ``func`` is a regular function,
and :class:`GeneratorWorker` will be used if it is a generator.
_ignore_errors : bool, optional
If ``False`` (the default), errors raised in the other thread will be
reraised in the main thread (makes debugging significantly easier).
*args
will be passed to ``func``
**kwargs
will be passed to ``func``
Returns
-------
worker : WorkerBase
An instantiated worker. If ``_start_thread`` was ``False``, the worker
will have a `.start()` method that can be used to start the thread.
Raises
------
TypeError
If a worker_class is provided that is not a subclass of WorkerBase.
TypeError
If _connect is provided and is not a dict of ``{str: callable}``
Examples
--------
.. code-block:: python
def long_function(duration):
import time
time.sleep(duration)
worker = create_worker(long_function, 10)
"""
if not _worker_class:
if inspect.isgeneratorfunction(func):
_worker_class = GeneratorWorker
else:
_worker_class = FunctionWorker
if not (inspect.isclass(_worker_class) and issubclass(_worker_class, WorkerBase)):
raise TypeError(f"Worker {_worker_class} must be a subclass of WorkerBase")
worker = _worker_class(func, *args, **kwargs)
if _connect is not None:
if not isinstance(_connect, dict):
raise TypeError("The '_connect' argument must be a dict")
if _start_thread is None:
_start_thread = True
for key, val in _connect.items():
if not isinstance(val, (tuple, list)):
_val = [val]
for v in _val:
if not callable(v):
raise TypeError(
f'"_connect[{key!r}]" must be a function or '
"sequence of functions"
)
getattr(worker, key).connect(v)
# if the user has not provided a default connection for the "errored"
# signal... and they have not explicitly set ``ignore_errors=True``
# Then rereaise any errors from the thread.
if not _ignore_errors and not (_connect or {}).get("errored", False):
def reraise(e):
raise e
worker.errored.connect(reraise)
if _start_thread:
worker.start()
return worker
|
https://github.com/napari/napari/issues/1292
|
-------------------------------------------------------------------------
UnboundLocalError Traceback (most recent call last)
<ipython-input-23-1749d5f75cac> in <module>
52
53 viewer.window.add_dock_widget(loss_canvas)
---> 54 worker = train(model, data_loader, 500)
~/projects/napari/napari/_qt/threading.py in worker_function(*args, **kwargs)
628 kwargs['_worker_class'] = kwargs.get('_worker_class', worker_class)
629 kwargs['_ignore_errors'] = kwargs.get('_ignore_errors', ignore_errors)
--> 630 return create_worker(function, *args, **kwargs,)
631
632 return worker_function
~/projects/napari/napari/_qt/threading.py in create_worker(func, _start_thread, _connect, _worker_class, _ignore_errors, *args, **kwargs)
505 if not isinstance(val, (tuple, list)):
506 _val = [val]
--> 507 for v in _val:
508 if not callable(v):
509 raise TypeError(
UnboundLocalError: local variable '_val' referenced before assignment
|
UnboundLocalError
|
def add_image(
self,
data=None,
*,
channel_axis=None,
rgb=None,
colormap=None,
contrast_limits=None,
gamma=1,
interpolation="nearest",
rendering="mip",
iso_threshold=0.5,
attenuation=0.5,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=1,
blending=None,
visible=True,
multiscale=None,
) -> Union[layers.Image, List[layers.Image]]:
"""Add an image layer to the layers list.
Parameters
----------
data : array or list of array
Image data. Can be N dimensional. If the last dimension has length
3 or 4 can be interpreted as RGB or RGBA if rgb is `True`. If a
list and arrays are decreasing in shape then the data is treated as
a multiscale image.
channel_axis : int, optional
Axis to expand image along. If provided, each channel in the data
will be added as an individual image layer. In channel_axis mode,
all other parameters MAY be provided as lists, and the Nth value
will be applied to the Nth channel in the data. If a single value
is provided, it will be broadcast to all Layers.
rgb : bool or list
Whether the image is rgb RGB or RGBA. If not specified by user and
the last dimension of the data has length 3 or 4 it will be set as
`True`. If `False` the image is interpreted as a luminance image.
If a list then must be same length as the axis that is being
expanded as channels.
colormap : str, vispy.Color.Colormap, tuple, dict, list
Colormaps to use for luminance images. If a string must be the name
of a supported colormap from vispy or matplotlib. If a tuple the
first value must be a string to assign as a name to a colormap and
the second item must be a Colormap. If a dict the key must be a
string to assign as a name to a colormap and the value must be a
Colormap. If a list then must be same length as the axis that is
being expanded as channels, and each colormap is applied to each
new image layer.
contrast_limits : list (2,)
Color limits to be used for determining the colormap bounds for
luminance images. If not passed is calculated as the min and max of
the image. If list of lists then must be same length as the axis
that is being expanded and then each colormap is applied to each
image.
gamma : list, float
Gamma correction for determining colormap linearity. Defaults to 1.
If a list then must be same length as the axis that is being
expanded as channels.
interpolation : str or list
Interpolation mode used by vispy. Must be one of our supported
modes. If a list then must be same length as the axis that is being
expanded as channels.
rendering : str or list
Rendering mode used by vispy. Must be one of our supported
modes. If a list then must be same length as the axis that is being
expanded as channels.
iso_threshold : float or list
Threshold for isosurface. If a list then must be same length as the
axis that is being expanded as channels.
attenuation : float or list
Attenuation rate for attenuated maximum intensity projection. If a
list then must be same length as the axis that is being expanded as
channels.
name : str or list of str
Name of the layer. If a list then must be same length as the axis
that is being expanded as channels.
metadata : dict or list of dict
Layer metadata. If a list then must be a list of dicts with the
same length as the axis that is being expanded as channels.
scale : tuple of float or list
Scale factors for the layer. If a list then must be a list of
tuples of float with the same length as the axis that is being
expanded as channels.
translate : tuple of float or list
Translation values for the layer. If a list then must be a list of
tuples of float with the same length as the axis that is being
expanded as channels.
opacity : float or list
Opacity of the layer visual, between 0.0 and 1.0. If a list then
must be same length as the axis that is being expanded as channels.
blending : str or list
One of a list of preset blending modes that determines how RGB and
alpha values of the layer visual get mixed. Allowed values are
{'opaque', 'translucent', and 'additive'}. If a list then
must be same length as the axis that is being expanded as channels.
visible : bool or list of bool
Whether the layer visual is currently being displayed.
If a list then must be same length as the axis that is
being expanded as channels.
multiscale : bool
Whether the data is a multiscale image or not. Multiscale data is
represented by a list of array like image data. If not specified by
the user and if the data is a list of arrays that decrease in shape
then it will be taken to be multiscale. The first image in the list
should be the largest.
Returns
-------
layer : :class:`napari.layers.Image` or list
The newly-created image layer or list of image layers.
"""
if colormap is not None:
# standardize colormap argument(s) to strings, and make sure they
# are in AVAILABLE_COLORMAPS. This will raise one of many various
# errors if the colormap argument is invalid. See
# ensure_colormap_tuple for details
if isinstance(colormap, list):
colormap = [ensure_colormap_tuple(c)[0] for c in colormap]
else:
colormap, _ = ensure_colormap_tuple(colormap)
# doing this here for IDE/console autocompletion in add_image function.
kwargs = {
"rgb": rgb,
"colormap": colormap,
"contrast_limits": contrast_limits,
"gamma": gamma,
"interpolation": interpolation,
"rendering": rendering,
"iso_threshold": iso_threshold,
"attenuation": attenuation,
"name": name,
"metadata": metadata,
"scale": scale,
"translate": translate,
"opacity": opacity,
"blending": blending,
"visible": visible,
"multiscale": multiscale,
}
# these arguments are *already* iterables in the single-channel case.
iterable_kwargs = {"scale", "translate", "contrast_limits", "metadata"}
if channel_axis is None:
kwargs["colormap"] = kwargs["colormap"] or "gray"
kwargs["blending"] = kwargs["blending"] or "translucent"
# Helpful message if someone tries to add mulit-channel kwargs,
# but forget the channel_axis arg
for k, v in kwargs.items():
if k not in iterable_kwargs and is_sequence(v):
raise TypeError(
f"Received sequence for argument '{k}', "
"did you mean to specify a 'channel_axis'? "
)
return self.add_layer(layers.Image(data, **kwargs))
else:
# Determine if data is a multiscale
if multiscale is None:
multiscale, data = guess_multiscale(data)
n_channels = (data[0] if multiscale else data).shape[channel_axis]
kwargs["blending"] = kwargs["blending"] or "additive"
# turn the kwargs dict into a mapping of {key: iterator}
# so that we can use {k: next(v) for k, v in kwargs.items()} below
for key, val in kwargs.items():
if key == "colormap" and val is None:
if n_channels == 1:
kwargs[key] = iter(["gray"])
elif n_channels == 2:
kwargs[key] = iter(colormaps.MAGENTA_GREEN)
else:
kwargs[key] = itertools.cycle(colormaps.CYMRGB)
# make sure that iterable_kwargs are a *sequence* of iterables
# for the multichannel case. For example: if scale == (1, 2) &
# n_channels = 3, then scale should == [(1, 2), (1, 2), (1, 2)]
elif key in iterable_kwargs:
kwargs[key] = iter(ensure_sequence_of_iterables(val, n_channels))
else:
kwargs[key] = iter(ensure_iterable(val))
layer_list = []
for i in range(n_channels):
if multiscale:
image = [
np.take(data[j], i, axis=channel_axis) for j in range(len(data))
]
else:
image = np.take(data, i, axis=channel_axis)
i_kwargs = {k: next(v) for k, v in kwargs.items()}
layer = self.add_layer(layers.Image(image, **i_kwargs))
layer_list.append(layer)
return layer_list
|
def add_image(
self,
data=None,
*,
channel_axis=None,
rgb=None,
colormap=None,
contrast_limits=None,
gamma=1,
interpolation="nearest",
rendering="mip",
iso_threshold=0.5,
attenuation=0.5,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=1,
blending=None,
visible=True,
multiscale=None,
) -> Union[layers.Image, List[layers.Image]]:
"""Add an image layer to the layers list.
Parameters
----------
data : array or list of array
Image data. Can be N dimensional. If the last dimension has length
3 or 4 can be interpreted as RGB or RGBA if rgb is `True`. If a
list and arrays are decreasing in shape then the data is treated as
a multiscale image.
channel_axis : int, optional
Axis to expand image along. If provided, each channel in the data
will be added as an individual image layer. In channel_axis mode,
all other parameters MAY be provided as lists, and the Nth value
will be applied to the Nth channel in the data. If a single value
is provided, it will be broadcast to all Layers.
rgb : bool or list
Whether the image is rgb RGB or RGBA. If not specified by user and
the last dimension of the data has length 3 or 4 it will be set as
`True`. If `False` the image is interpreted as a luminance image.
If a list then must be same length as the axis that is being
expanded as channels.
colormap : str, vispy.Color.Colormap, tuple, dict, list
Colormaps to use for luminance images. If a string must be the name
of a supported colormap from vispy or matplotlib. If a tuple the
first value must be a string to assign as a name to a colormap and
the second item must be a Colormap. If a dict the key must be a
string to assign as a name to a colormap and the value must be a
Colormap. If a list then must be same length as the axis that is
being expanded as channels, and each colormap is applied to each
new image layer.
contrast_limits : list (2,)
Color limits to be used for determining the colormap bounds for
luminance images. If not passed is calculated as the min and max of
the image. If list of lists then must be same length as the axis
that is being expanded and then each colormap is applied to each
image.
gamma : list, float
Gamma correction for determining colormap linearity. Defaults to 1.
If a list then must be same length as the axis that is being
expanded as channels.
interpolation : str or list
Interpolation mode used by vispy. Must be one of our supported
modes. If a list then must be same length as the axis that is being
expanded as channels.
rendering : str or list
Rendering mode used by vispy. Must be one of our supported
modes. If a list then must be same length as the axis that is being
expanded as channels.
iso_threshold : float or list
Threshold for isosurface. If a list then must be same length as the
axis that is being expanded as channels.
attenuation : float or list
Attenuation rate for attenuated maximum intensity projection. If a
list then must be same length as the axis that is being expanded as
channels.
name : str or list of str
Name of the layer. If a list then must be same length as the axis
that is being expanded as channels.
metadata : dict or list of dict
Layer metadata. If a list then must be a list of dicts with the
same length as the axis that is being expanded as channels.
scale : tuple of float or list
Scale factors for the layer. If a list then must be a list of
tuples of float with the same length as the axis that is being
expanded as channels.
translate : tuple of float or list
Translation values for the layer. If a list then must be a list of
tuples of float with the same length as the axis that is being
expanded as channels.
opacity : float or list
Opacity of the layer visual, between 0.0 and 1.0. If a list then
must be same length as the axis that is being expanded as channels.
blending : str or list
One of a list of preset blending modes that determines how RGB and
alpha values of the layer visual get mixed. Allowed values are
{'opaque', 'translucent', and 'additive'}. If a list then
must be same length as the axis that is being expanded as channels.
visible : bool or list of bool
Whether the layer visual is currently being displayed.
If a list then must be same length as the axis that is
being expanded as channels.
multiscale : bool
Whether the data is a multiscale image or not. Multiscale data is
represented by a list of array like image data. If not specified by
the user and if the data is a list of arrays that decrease in shape
then it will be taken to be multiscale. The first image in the list
should be the largest.
Returns
-------
layer : :class:`napari.layers.Image` or list
The newly-created image layer or list of image layers.
"""
if colormap is not None:
# standardize colormap argument(s) to strings, and make sure they
# are in AVAILABLE_COLORMAPS. This will raise one of many various
# errors if the colormap argument is invalid. See
# ensure_colormap_tuple for details
if isinstance(colormap, list):
colormap = [ensure_colormap_tuple(c)[0] for c in colormap]
else:
colormap, _ = ensure_colormap_tuple(colormap)
# doing this here for IDE/console autocompletion in add_image function.
kwargs = {
"rgb": rgb,
"colormap": colormap,
"contrast_limits": contrast_limits,
"gamma": gamma,
"interpolation": interpolation,
"rendering": rendering,
"iso_threshold": iso_threshold,
"attenuation": attenuation,
"name": name,
"metadata": metadata,
"scale": scale,
"translate": translate,
"opacity": opacity,
"blending": blending,
"visible": visible,
"multiscale": multiscale,
}
# these arguments are *already* iterables in the single-channel case.
iterable_kwargs = {"scale", "translate", "contrast_limits", "metadata"}
if channel_axis is None:
kwargs["colormap"] = kwargs["colormap"] or "gray"
kwargs["blending"] = kwargs["blending"] or "translucent"
# Helpful message if someone tries to add mulit-channel kwargs,
# but forget the channel_axis arg
for k, v in kwargs.items():
if k not in iterable_kwargs and is_sequence(v):
raise TypeError(
f"Received sequence for argument '{k}', "
"did you mean to specify a 'channel_axis'? "
)
return self.add_layer(layers.Image(data, **kwargs))
else:
# Determine if data is a multiscale
if multiscale is None:
multiscale = guess_multiscale(data)
n_channels = (data[0] if multiscale else data).shape[channel_axis]
kwargs["blending"] = kwargs["blending"] or "additive"
# turn the kwargs dict into a mapping of {key: iterator}
# so that we can use {k: next(v) for k, v in kwargs.items()} below
for key, val in kwargs.items():
if key == "colormap" and val is None:
if n_channels == 1:
kwargs[key] = iter(["gray"])
elif n_channels == 2:
kwargs[key] = iter(colormaps.MAGENTA_GREEN)
else:
kwargs[key] = itertools.cycle(colormaps.CYMRGB)
# make sure that iterable_kwargs are a *sequence* of iterables
# for the multichannel case. For example: if scale == (1, 2) &
# n_channels = 3, then scale should == [(1, 2), (1, 2), (1, 2)]
elif key in iterable_kwargs:
kwargs[key] = iter(ensure_sequence_of_iterables(val, n_channels))
else:
kwargs[key] = iter(ensure_iterable(val))
layer_list = []
for i in range(n_channels):
if multiscale:
image = [
np.take(data[j], i, axis=channel_axis) for j in range(len(data))
]
else:
image = np.take(data, i, axis=channel_axis)
i_kwargs = {k: next(v) for k, v in kwargs.items()}
layer = self.add_layer(layers.Image(image, **i_kwargs))
layer_list.append(layer)
return layer_list
|
https://github.com/napari/napari/issues/1231
|
~/conda/envs/all/lib/python3.8/site-packages/napari/layers/image/image.py in __init__(self, data, rgb, colormap, contrast_limits, gamma, interpolation, rendering, iso_threshold, attenuation, name, metadata, scale, translate, opacity, blending, visible, multiscale)
162 init_shape = data[0].shape
163 else:
--> 164 init_shape = data.shape
165
166 # Determine if rgb
AttributeError: 'list' object has no attribute 'shape'
|
AttributeError
|
def guess_multiscale(data):
"""Guess whether the passed data is multiscale, process it accordingly.
If shape of arrays along first axis is strictly decreasing, the data is
multiscale. If it is the same shape everywhere, it is not. Various
ambiguous conditions in between will result in a ValueError being raised,
or in an "unwrapping" of data, if data contains only one element.
Parameters
----------
data : array or list of array
Data that should be checked.
Returns
-------
multiscale : bool
True if the data is thought to be multiscale, False otherwise.
data : list or array
The input data, perhaps with the leading axis removed.
"""
# If the data has ndim and is not one-dimensional then cannot be multiscale
# If data is a zarr array, this check ensure that subsets of it are not
# instantiated. (`for d in data` instantiates `d` as a NumPy array if
# `data` is a zarr array.)
if hasattr(data, "ndim") and data.ndim > 1:
return False, data
shapes = [d.shape for d in data]
sizes = np.array([np.prod(shape, dtype=np.uint64) for shape in shapes])
if len(sizes) == 1 and (isinstance(data, list) or isinstance(data, tuple)):
# pyramid with only one level, unwrap
return False, data[0]
if len(sizes) > 1:
consistent = bool(np.all(sizes[:-1] > sizes[1:]))
flat = bool(np.all(sizes == sizes[0]))
if flat:
# note: the individual array case should be caught by the first
# code line in this function, hasattr(ndim) and ndim > 1.
raise ValueError(
"Input data should be an array-like object, or a sequence of "
"arrays of decreasing size. Got arrays of single shape: "
f"{shapes[0]}"
)
if not consistent:
raise ValueError(
"Input data should be an array-like object, or a sequence of "
"arrays of decreasing size. Got arrays in incorrect order, "
f"shapes: {shapes}"
)
return True, data
else:
return False, data
|
def guess_multiscale(data):
"""Guess if the passed data is multiscale of not.
If shape of arrays along first axis is strictly decreasing.
Parameters
----------
data : array or list of array
Data that should be checked.
Returns
-------
bool
If data is multiscale or not.
"""
# If the data has ndim and is not one-dimensional then cannot be multiscale
if hasattr(data, "ndim") and data.ndim > 1:
return False
size = np.array([np.prod(d.shape, dtype=np.uint64) for d in data])
if len(size) > 1:
return bool(np.all(size[:-1] > size[1:]))
else:
return False
|
https://github.com/napari/napari/issues/1231
|
~/conda/envs/all/lib/python3.8/site-packages/napari/layers/image/image.py in __init__(self, data, rgb, colormap, contrast_limits, gamma, interpolation, rendering, iso_threshold, attenuation, name, metadata, scale, translate, opacity, blending, visible, multiscale)
162 init_shape = data[0].shape
163 else:
--> 164 init_shape = data.shape
165
166 # Determine if rgb
AttributeError: 'list' object has no attribute 'shape'
|
AttributeError
|
def __init__(
self,
data,
*,
rgb=None,
colormap="gray",
contrast_limits=None,
gamma=1,
interpolation="nearest",
rendering="mip",
iso_threshold=0.5,
attenuation=0.5,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=1,
blending="translucent",
visible=True,
multiscale=None,
):
if isinstance(data, types.GeneratorType):
data = list(data)
# Determine if data is a multiscale
if multiscale is None:
multiscale, data = guess_multiscale(data)
# Determine initial shape
if multiscale:
init_shape = data[0].shape
else:
init_shape = data.shape
# Determine if rgb
if rgb is None:
rgb = guess_rgb(init_shape)
# Determine dimensionality of the data
if rgb:
ndim = len(init_shape) - 1
else:
ndim = len(init_shape)
super().__init__(
data,
ndim,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
opacity=opacity,
blending=blending,
visible=visible,
multiscale=multiscale,
)
self.events.add(
interpolation=Event,
rendering=Event,
iso_threshold=Event,
attenuation=Event,
)
# Set data
self.rgb = rgb
self._data = data
if self.multiscale:
self._data_level = len(self.data) - 1
# Determine which level of the multiscale to use for the thumbnail.
# Pick the smallest level with at least one axis >= 64. This is
# done to prevent the thumbnail from being from one of the very
# low resolution layers and therefore being very blurred.
big_enough_levels = [np.any(np.greater_equal(p.shape, 64)) for p in data]
if np.any(big_enough_levels):
self._thumbnail_level = np.where(big_enough_levels)[0][-1]
else:
self._thumbnail_level = 0
else:
self._data_level = 0
self._thumbnail_level = 0
self.corner_pixels[1] = self.level_shapes[self._data_level]
# Intitialize image views and thumbnails with zeros
if self.rgb:
self._data_view = np.zeros((1,) * self.dims.ndisplay + (self.shape[-1],))
else:
self._data_view = np.zeros((1,) * self.dims.ndisplay)
self._data_raw = self._data_view
self._data_thumbnail = self._data_view
# Set contrast_limits and colormaps
self._gamma = gamma
self._iso_threshold = iso_threshold
self._attenuation = attenuation
if contrast_limits is None:
self.contrast_limits_range = self._calc_data_range()
else:
self.contrast_limits_range = contrast_limits
self._contrast_limits = tuple(self.contrast_limits_range)
self.colormap = colormap
self.contrast_limits = self._contrast_limits
self._interpolation = {
2: Interpolation.NEAREST,
3: (
Interpolation3D.NEAREST
if self.__class__.__name__ == "Labels"
else Interpolation3D.LINEAR
),
}
self.interpolation = interpolation
self.rendering = rendering
# Trigger generation of view slice and thumbnail
self._update_dims()
|
def __init__(
self,
data,
*,
rgb=None,
colormap="gray",
contrast_limits=None,
gamma=1,
interpolation="nearest",
rendering="mip",
iso_threshold=0.5,
attenuation=0.5,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=1,
blending="translucent",
visible=True,
multiscale=None,
):
if isinstance(data, types.GeneratorType):
data = list(data)
# Determine if data is a multiscale
if multiscale is None:
multiscale = guess_multiscale(data)
# Determine initial shape
if multiscale:
init_shape = data[0].shape
else:
init_shape = data.shape
# Determine if rgb
if rgb is None:
rgb = guess_rgb(init_shape)
# Determine dimensionality of the data
if rgb:
ndim = len(init_shape) - 1
else:
ndim = len(init_shape)
super().__init__(
data,
ndim,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
opacity=opacity,
blending=blending,
visible=visible,
multiscale=multiscale,
)
self.events.add(
interpolation=Event,
rendering=Event,
iso_threshold=Event,
attenuation=Event,
)
# Set data
self.rgb = rgb
self._data = data
if self.multiscale:
self._data_level = len(self.data) - 1
# Determine which level of the multiscale to use for the thumbnail.
# Pick the smallest level with at least one axis >= 64. This is
# done to prevent the thumbnail from being from one of the very
# low resolution layers and therefore being very blurred.
big_enough_levels = [np.any(np.greater_equal(p.shape, 64)) for p in data]
if np.any(big_enough_levels):
self._thumbnail_level = np.where(big_enough_levels)[0][-1]
else:
self._thumbnail_level = 0
else:
self._data_level = 0
self._thumbnail_level = 0
self.corner_pixels[1] = self.level_shapes[self._data_level]
# Intitialize image views and thumbnails with zeros
if self.rgb:
self._data_view = np.zeros((1,) * self.dims.ndisplay + (self.shape[-1],))
else:
self._data_view = np.zeros((1,) * self.dims.ndisplay)
self._data_raw = self._data_view
self._data_thumbnail = self._data_view
# Set contrast_limits and colormaps
self._gamma = gamma
self._iso_threshold = iso_threshold
self._attenuation = attenuation
if contrast_limits is None:
self.contrast_limits_range = self._calc_data_range()
else:
self.contrast_limits_range = contrast_limits
self._contrast_limits = tuple(self.contrast_limits_range)
self.colormap = colormap
self.contrast_limits = self._contrast_limits
self._interpolation = {
2: Interpolation.NEAREST,
3: (
Interpolation3D.NEAREST
if self.__class__.__name__ == "Labels"
else Interpolation3D.LINEAR
),
}
self.interpolation = interpolation
self.rendering = rendering
# Trigger generation of view slice and thumbnail
self._update_dims()
|
https://github.com/napari/napari/issues/1231
|
~/conda/envs/all/lib/python3.8/site-packages/napari/layers/image/image.py in __init__(self, data, rgb, colormap, contrast_limits, gamma, interpolation, rendering, iso_threshold, attenuation, name, metadata, scale, translate, opacity, blending, visible, multiscale)
162 init_shape = data[0].shape
163 else:
--> 164 init_shape = data.shape
165
166 # Determine if rgb
AttributeError: 'list' object has no attribute 'shape'
|
AttributeError
|
def on_key_release(self, event):
"""Called whenever key released in canvas.
Parameters
----------
event : qtpy.QtCore.QEvent
Event from the Qt context.
"""
if event.key is None:
return
combo = components_to_key_combo(event.key.name, event.modifiers)
self.viewer.release_key(combo)
|
def on_key_release(self, event):
"""Called whenever key released in canvas.
Parameters
----------
event : qtpy.QtCore.QEvent
Event from the Qt context.
"""
combo = components_to_key_combo(event.key.name, event.modifiers)
self.viewer.release_key(combo)
|
https://github.com/napari/napari/issues/1203
|
WARNING: Traceback (most recent call last):
File "/home/jni/miniconda3/envs/all/lib/python3.8/site-packages/vispy/app/backends/_qt.py", line 505, in keyReleaseEvent
self._keyEvent(self._vispy_canvas.events.key_release, ev)
File "/home/jni/miniconda3/envs/all/lib/python3.8/site-packages/vispy/app/backends/_qt.py", line 551, in _keyEvent
func(native=ev, key=key, text=text_type(ev.text()), modifiers=mod)
File "/home/jni/miniconda3/envs/all/lib/python3.8/site-packages/vispy/util/event.py", line 455, in __call__
self._invoke_callback(cb, event)
File "/home/jni/miniconda3/envs/all/lib/python3.8/site-packages/vispy/util/event.py", line 473, in _invoke_callback
_handle_exception(self.ignore_callback_errors,
File "/home/jni/miniconda3/envs/all/lib/python3.8/site-packages/vispy/util/event.py", line 471, in _invoke_callback
cb(event)
File "/home/jni/miniconda3/envs/all/lib/python3.8/site-packages/napari/_qt/qt_viewer.py", line 574, in on_key_release
combo = components_to_key_combo(event.key.name, event.modifiers)
AttributeError: 'NoneType' object has no attribute 'name'
|
AttributeError
|
def on_draw(self, event):
"""Called whenever the canvas is drawn.
This is triggered from vispy whenever new data is sent to the canvas or
the camera is moved and is connected in the `QtViewer`.
"""
self.layer.scale_factor = self.scale_factor
old_corner_pixels = self.layer.corner_pixels
self.layer.corner_pixels = self.coordinates_of_canvas_corners()
# For 2D multiscale data determine if new data has been requested
if (
self.layer.multiscale
and self.layer.dims.ndisplay == 2
and self.node.canvas is not None
):
self.layer._update_multiscale(
corner_pixels=old_corner_pixels,
shape_threshold=self.node.canvas.size,
)
|
def on_draw(self, event):
"""Called whenever the canvas is drawn.
This is triggered from vispy whenever new data is sent to the canvas or
the camera is moved and is connected in the `QtViewer`.
"""
self.layer.scale_factor = self.scale_factor
old_corner_pixels = self.layer.corner_pixels
self.layer.corner_pixels = self.coordinates_of_canvas_corners()
if self.layer.multiscale and self.node.canvas is not None:
self.layer._update_multiscale(
corner_pixels=old_corner_pixels,
shape_threshold=self.node.canvas.size,
)
|
https://github.com/napari/napari/issues/1178
|
Traceback (most recent call last):
File "/home/jni/miniconda3/envs/f7/lib/python3.7/site-packages/vispy/app/backends/_qt.py", line 825, in paintGL
self._vispy_canvas.events.draw(region=None)
File "/home/jni/miniconda3/envs/f7/lib/python3.7/site-packages/vispy/util/event.py", line 455, in __call__
self._invoke_callback(cb, event)
File "/home/jni/miniconda3/envs/f7/lib/python3.7/site-packages/vispy/util/event.py", line 475, in _invoke_callback
self, cb_event=(cb, event))
File "/home/jni/miniconda3/envs/f7/lib/python3.7/site-packages/vispy/util/event.py", line 471, in _invoke_callback
cb(event)
File "/home/jni/projects/napari/napari/_vispy/vispy_base_layer.py", line 241, in on_draw
shape_threshold=self.node.canvas.size,
File "/home/jni/projects/napari/napari/layers/base/base.py", line 675, in _update_multiscale
downsample_factors,
File "/home/jni/projects/napari/napari/layers/utils/layer_utils.py", line 249, in compute_multiscale_level
locations = np.argwhere(np.all(scaled_shape > shape_threshold, axis=1))
ValueError: operands could not be broadcast together with shapes (2,3) (2,)
|
ValueError
|
def _update_multiscale(self, corner_pixels, shape_threshold):
"""Refresh layer multiscale if new resolution level or tile is required.
Parameters
----------
corner_pixels : array
Coordinates of the top-left and bottom-right canvas pixels in the
data space of each layer. The length of the tuple is equal to the
number of dimensions of the layer. If different from the current
layer corner_pixels the layer needs refreshing.
shape_threshold : tuple
Requested shape of field of view in data coordinates
"""
if len(self.dims.displayed) == 3:
data_level = corner_pixels.shape[1] - 1
else:
# Clip corner pixels inside data shape
new_corner_pixels = np.clip(
self.corner_pixels,
0,
np.subtract(self.level_shapes[self.data_level], 1),
)
# Scale to full resolution of the data
requested_shape = (
new_corner_pixels[1] - new_corner_pixels[0]
) * self.downsample_factors[self.data_level]
downsample_factors = self.downsample_factors[:, self.dims.displayed]
data_level = compute_multiscale_level(
requested_shape[self.dims.displayed],
shape_threshold,
downsample_factors,
)
if data_level != self.data_level:
# Set the data level, which will trigger a layer refresh and
# further updates including recalculation of the corner_pixels
# for the new level
self.data_level = data_level
self.refresh()
elif not np.all(self.corner_pixels == corner_pixels):
self.refresh()
|
def _update_multiscale(self, corner_pixels, shape_threshold):
"""Refresh layer multiscale if new resolution level or tile is required.
Parameters
----------
corner_pixels : array
Coordinates of the top-left and bottom-right canvas pixels in the
data space of each layer. The length of the tuple is equal to the
number of dimensions of the layer. If different from the current
layer corner_pixels the layer needs refreshing.
requested_shape : tuple
Requested shape of field of view in data coordinates
"""
# Clip corner pixels inside data shape
new_corner_pixels = np.clip(
self.corner_pixels,
0,
np.subtract(self.level_shapes[self.data_level], 1),
)
# Scale to full resolution of the data
requested_shape = (
new_corner_pixels[1] - new_corner_pixels[0]
) * self.downsample_factors[self.data_level]
downsample_factors = self.downsample_factors[:, self.dims.displayed]
data_level = compute_multiscale_level(
requested_shape[self.dims.displayed],
shape_threshold,
downsample_factors,
)
if data_level != self.data_level:
# Set the data level, which will trigger a layer refresh and
# further updates including recalculation of the corner_pixels
# for the new level
self.data_level = data_level
self.refresh()
elif not np.all(self.corner_pixels == corner_pixels):
self.refresh()
|
https://github.com/napari/napari/issues/1178
|
Traceback (most recent call last):
File "/home/jni/miniconda3/envs/f7/lib/python3.7/site-packages/vispy/app/backends/_qt.py", line 825, in paintGL
self._vispy_canvas.events.draw(region=None)
File "/home/jni/miniconda3/envs/f7/lib/python3.7/site-packages/vispy/util/event.py", line 455, in __call__
self._invoke_callback(cb, event)
File "/home/jni/miniconda3/envs/f7/lib/python3.7/site-packages/vispy/util/event.py", line 475, in _invoke_callback
self, cb_event=(cb, event))
File "/home/jni/miniconda3/envs/f7/lib/python3.7/site-packages/vispy/util/event.py", line 471, in _invoke_callback
cb(event)
File "/home/jni/projects/napari/napari/_vispy/vispy_base_layer.py", line 241, in on_draw
shape_threshold=self.node.canvas.size,
File "/home/jni/projects/napari/napari/layers/base/base.py", line 675, in _update_multiscale
downsample_factors,
File "/home/jni/projects/napari/napari/layers/utils/layer_utils.py", line 249, in compute_multiscale_level
locations = np.argwhere(np.all(scaled_shape > shape_threshold, axis=1))
ValueError: operands could not be broadcast together with shapes (2,3) (2,)
|
ValueError
|
def _set_view_slice(self):
"""Set the view given the indices to slice with."""
not_disp = self.dims.not_displayed
if self.rgb:
# if rgb need to keep the final axis fixed during the
# transpose. The index of the final axis depends on how many
# axes are displayed.
order = self.dims.displayed_order + (max(self.dims.displayed_order) + 1,)
else:
order = self.dims.displayed_order
if self.multiscale:
# If 3d redering just show lowest level of multiscale
if self.dims.ndisplay == 3:
self.data_level = len(self.data) - 1
# Slice currently viewed level
level = self.data_level
indices = np.array(self.dims.indices)
downsampled_indices = (
indices[not_disp] / self.downsample_factors[level, not_disp]
)
downsampled_indices = np.round(downsampled_indices.astype(float)).astype(int)
downsampled_indices = np.clip(
downsampled_indices, 0, self.level_shapes[level, not_disp] - 1
)
indices[not_disp] = downsampled_indices
scale = np.ones(self.ndim)
for d in self.dims.displayed:
scale[d] = self.downsample_factors[self.data_level][d]
self._transforms["tile2data"].scale = scale
if self.dims.ndisplay == 2:
corner_pixels = np.clip(
self.corner_pixels,
0,
np.subtract(self.level_shapes[self.data_level], 1),
)
for d in self.dims.displayed:
indices[d] = slice(corner_pixels[0, d], corner_pixels[1, d] + 1, 1)
self._transforms["tile2data"].translate = (
corner_pixels[0]
* self._transforms["data2world"].scale
* self._transforms["tile2data"].scale
)
image = np.transpose(np.asarray(self.data[level][tuple(indices)]), order)
# Slice thumbnail
indices = np.array(self.dims.indices)
downsampled_indices = (
indices[not_disp] / self.downsample_factors[self._thumbnail_level, not_disp]
)
downsampled_indices = np.round(downsampled_indices.astype(float)).astype(int)
downsampled_indices = np.clip(
downsampled_indices,
0,
self.level_shapes[self._thumbnail_level, not_disp] - 1,
)
indices[not_disp] = downsampled_indices
thumbnail_source = np.asarray(
self.data[self._thumbnail_level][tuple(indices)]
).transpose(order)
else:
self._transforms["tile2data"].scale = np.ones(self.dims.ndim)
image = np.asarray(self.data[self.dims.indices]).transpose(order)
thumbnail_source = image
if self.rgb and image.dtype.kind == "f":
self._data_raw = np.clip(image, 0, 1)
self._data_view = self._raw_to_displayed(self._data_raw)
self._data_thumbnail = self._raw_to_displayed(np.clip(thumbnail_source, 0, 1))
else:
self._data_raw = image
self._data_view = self._raw_to_displayed(self._data_raw)
self._data_thumbnail = self._raw_to_displayed(thumbnail_source)
if self.multiscale:
self.events.scale()
self.events.translate()
|
def _set_view_slice(self):
"""Set the view given the indices to slice with."""
not_disp = self.dims.not_displayed
if self.rgb:
# if rgb need to keep the final axis fixed during the
# transpose. The index of the final axis depends on how many
# axes are displayed.
order = self.dims.displayed_order + (max(self.dims.displayed_order) + 1,)
else:
order = self.dims.displayed_order
if self.multiscale:
# If 3d redering just show lowest level of multiscale
if self.dims.ndisplay == 3:
self.data_level = len(self.data) - 1
# Slice currently viewed level
level = self.data_level
indices = np.array(self.dims.indices)
downsampled_indices = (
indices[not_disp] / self.downsample_factors[level, not_disp]
)
downsampled_indices = np.round(downsampled_indices.astype(float)).astype(int)
downsampled_indices = np.clip(
downsampled_indices, 0, self.level_shapes[level, not_disp] - 1
)
indices[not_disp] = downsampled_indices
scale = np.ones(self.ndim)
for d in self.dims.displayed:
scale[d] = self.downsample_factors[self.data_level][d]
self._transforms["tile2data"].scale = scale
corner_pixels = np.clip(
self.corner_pixels,
0,
np.subtract(self.level_shapes[self.data_level], 1),
)
for d in self.dims.displayed:
indices[d] = slice(corner_pixels[0, d], corner_pixels[1, d] + 1, 1)
self._transforms["tile2data"].translate = (
corner_pixels[0]
* self._transforms["data2world"].scale
* self._transforms["tile2data"].scale
)
image = np.transpose(np.asarray(self.data[level][tuple(indices)]), order)
# Slice thumbnail
indices = np.array(self.dims.indices)
downsampled_indices = (
indices[not_disp] / self.downsample_factors[self._thumbnail_level, not_disp]
)
downsampled_indices = np.round(downsampled_indices.astype(float)).astype(int)
downsampled_indices = np.clip(
downsampled_indices,
0,
self.level_shapes[self._thumbnail_level, not_disp] - 1,
)
indices[not_disp] = downsampled_indices
thumbnail_source = np.asarray(
self.data[self._thumbnail_level][tuple(indices)]
).transpose(order)
else:
self._transforms["tile2data"].scale = np.ones(self.dims.ndim)
image = np.asarray(self.data[self.dims.indices]).transpose(order)
thumbnail_source = image
if self.rgb and image.dtype.kind == "f":
self._data_raw = np.clip(image, 0, 1)
self._data_view = self._raw_to_displayed(self._data_raw)
self._data_thumbnail = self._raw_to_displayed(np.clip(thumbnail_source, 0, 1))
else:
self._data_raw = image
self._data_view = self._raw_to_displayed(self._data_raw)
self._data_thumbnail = self._raw_to_displayed(thumbnail_source)
if self.multiscale:
self.events.scale()
self.events.translate()
|
https://github.com/napari/napari/issues/1178
|
Traceback (most recent call last):
File "/home/jni/miniconda3/envs/f7/lib/python3.7/site-packages/vispy/app/backends/_qt.py", line 825, in paintGL
self._vispy_canvas.events.draw(region=None)
File "/home/jni/miniconda3/envs/f7/lib/python3.7/site-packages/vispy/util/event.py", line 455, in __call__
self._invoke_callback(cb, event)
File "/home/jni/miniconda3/envs/f7/lib/python3.7/site-packages/vispy/util/event.py", line 475, in _invoke_callback
self, cb_event=(cb, event))
File "/home/jni/miniconda3/envs/f7/lib/python3.7/site-packages/vispy/util/event.py", line 471, in _invoke_callback
cb(event)
File "/home/jni/projects/napari/napari/_vispy/vispy_base_layer.py", line 241, in on_draw
shape_threshold=self.node.canvas.size,
File "/home/jni/projects/napari/napari/layers/base/base.py", line 675, in _update_multiscale
downsample_factors,
File "/home/jni/projects/napari/napari/layers/utils/layer_utils.py", line 249, in compute_multiscale_level
locations = np.argwhere(np.all(scaled_shape > shape_threshold, axis=1))
ValueError: operands could not be broadcast together with shapes (2,3) (2,)
|
ValueError
|
def _remove_layer(self, event):
"""When a layer is removed, remove its parent.
Parameters
----------
event : qtpy.QtCore.QEvent
Event from the Qt context.
"""
layer = event.item
vispy_layer = self.layer_to_visual[layer]
self.canvas.events.draw.disconnect(vispy_layer.on_draw)
vispy_layer.node.transforms = ChainTransform()
vispy_layer.node.parent = None
del vispy_layer
|
def _remove_layer(self, event):
"""When a layer is removed, remove its parent.
Parameters
----------
event : qtpy.QtCore.QEvent
Event from the Qt context.
"""
layer = event.item
vispy_layer = self.layer_to_visual[layer]
vispy_layer.node.transforms = ChainTransform()
vispy_layer.node.parent = None
del self.layer_to_visual[layer]
|
https://github.com/napari/napari/issues/1157
|
Traceback (most recent call last):
File "XXXX\lib\site-packages\vispy\app\backends\_qt.py", line 825, in paintGL
self._vispy_canvas.events.draw(region=None)
File "XXXX\lib\site-packages\vispy\util\event.py", line 455, in __call__
self._invoke_callback(cb, event)
File "XXXX\lib\site-packages\vispy\util\event.py", line 473, in _invoke_callback
_handle_exception(self.ignore_callback_errors,
File "XXXX\lib\site-packages\vispy\util\event.py", line 471, in _invoke_callback
cb(event)
File "YYY\napari\napari\_vispy\vispy_base_layer.py", line 234, in on_draw
self.layer.scale_factor = self.scale_factor
File "YYY\napari\napari\_vispy\vispy_base_layer.py", line 131, in scale_factor
transform = self.node.canvas.scene.node_transform(self.node)
AttributeError: 'NoneType' object has no attribute 'scene'
|
AttributeError
|
def scale_factor(self):
"""float: Conversion factor from canvas pixels to data coordinates."""
if self.node.canvas is not None:
transform = self.node.canvas.scene.node_transform(self.node)
return transform.map([1, 1])[0] - transform.map([0, 0])[0]
else:
return 1
|
def scale_factor(self):
"""float: Conversion factor from canvas coordinates to image
coordinates, which depends on the current zoom level.
"""
transform = self.node.canvas.scene.node_transform(self.node)
scale_factor = transform.map([1, 1])[0] - transform.map([0, 0])[0]
return scale_factor
|
https://github.com/napari/napari/issues/1157
|
Traceback (most recent call last):
File "XXXX\lib\site-packages\vispy\app\backends\_qt.py", line 825, in paintGL
self._vispy_canvas.events.draw(region=None)
File "XXXX\lib\site-packages\vispy\util\event.py", line 455, in __call__
self._invoke_callback(cb, event)
File "XXXX\lib\site-packages\vispy\util\event.py", line 473, in _invoke_callback
_handle_exception(self.ignore_callback_errors,
File "XXXX\lib\site-packages\vispy\util\event.py", line 471, in _invoke_callback
cb(event)
File "YYY\napari\napari\_vispy\vispy_base_layer.py", line 234, in on_draw
self.layer.scale_factor = self.scale_factor
File "YYY\napari\napari\_vispy\vispy_base_layer.py", line 131, in scale_factor
transform = self.node.canvas.scene.node_transform(self.node)
AttributeError: 'NoneType' object has no attribute 'scene'
|
AttributeError
|
def _transform_position(self, position):
"""Transform cursor position from canvas space (x, y) into image space.
Parameters
-------
position : 2-tuple
Cursor position in canvase (x, y).
Returns
-------
coords : tuple
Coordinates of cursor in image space for displayed dimensions only
"""
nd = self.layer.dims.ndisplay
if self.node.canvas is not None:
transform = self.node.canvas.scene.node_transform(self.node)
# Map and offset position so that pixel center is at 0
mapped_position = transform.map(list(position))[:nd] - 0.5
return tuple(mapped_position[::-1])
else:
return (0,) * nd
|
def _transform_position(self, position):
"""Transform cursor position from canvas space (x, y) into image space.
Parameters
-------
position : 2-tuple
Cursor position in canvase (x, y).
Returns
-------
coords : tuple
Coordinates of cursor in image space for displayed dimensions only
"""
nd = self.layer.dims.ndisplay
if self.node.canvas is not None:
transform = self.node.canvas.scene.node_transform(self.node)
# Map and offset position so that pixel center is at 0
mapped_position = transform.map(list(position))[:nd] - 0.5
coords = tuple(mapped_position[::-1])
else:
coords = (0,) * nd
return coords
|
https://github.com/napari/napari/issues/1157
|
Traceback (most recent call last):
File "XXXX\lib\site-packages\vispy\app\backends\_qt.py", line 825, in paintGL
self._vispy_canvas.events.draw(region=None)
File "XXXX\lib\site-packages\vispy\util\event.py", line 455, in __call__
self._invoke_callback(cb, event)
File "XXXX\lib\site-packages\vispy\util\event.py", line 473, in _invoke_callback
_handle_exception(self.ignore_callback_errors,
File "XXXX\lib\site-packages\vispy\util\event.py", line 471, in _invoke_callback
cb(event)
File "YYY\napari\napari\_vispy\vispy_base_layer.py", line 234, in on_draw
self.layer.scale_factor = self.scale_factor
File "YYY\napari\napari\_vispy\vispy_base_layer.py", line 131, in scale_factor
transform = self.node.canvas.scene.node_transform(self.node)
AttributeError: 'NoneType' object has no attribute 'scene'
|
AttributeError
|
def guess_continuous(property: np.ndarray) -> bool:
"""Guess if the property is continuous (return True) or categorical (return False)"""
# if the property is a floating type, guess continuous
if issubclass(property.dtype.type, np.floating) or len(np.unique(property)) > 16:
return True
else:
return False
|
def guess_continuous(property: np.ndarray) -> bool:
"""Guess if the property is continuous (return True) or categorical (return False)"""
# if the property is a floating type, guess continuous
if issubclass(property.dtype.type, np.floating) and len(property < 16):
return True
else:
return False
|
https://github.com/napari/napari/issues/930
|
(base) czirwc1macos2701:image-demos nsofroniew$ python test-examples/points_properties_empty.py
Traceback (most recent call last):
File "test-examples/points_properties_empty.py", line 29, in <module>
name='points'
File "/Users/nsofroniew/GitHub/napari/napari/components/add_layers_mixin.py", line 366, in add_points
visible=visible,
File "/Users/nsofroniew/GitHub/napari/napari/layers/points/points.py", line 336, in __init__
self._current_face_color = self.face_color[-1]
IndexError: index -1 is out of bounds for axis 0 with size 0
|
IndexError
|
def __init__(
self,
data=None,
*,
properties=None,
symbol="o",
size=10,
edge_width=1,
edge_color="black",
edge_color_cycle=None,
edge_colormap="viridis",
edge_contrast_limits=None,
face_color="white",
face_color_cycle=None,
face_colormap="viridis",
face_contrast_limits=None,
n_dimensional=False,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=1,
blending="translucent",
visible=True,
):
if data is None:
data = np.empty((0, 2))
else:
data = np.atleast_2d(data)
ndim = data.shape[1]
super().__init__(
data,
ndim,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
opacity=opacity,
blending=blending,
visible=visible,
)
self.events.add(
mode=Event,
size=Event,
edge_width=Event,
face_color=Event,
current_face_color=Event,
edge_color=Event,
current_edge_color=Event,
current_properties=Event,
symbol=Event,
n_dimensional=Event,
highlight=Event,
)
# update highlights when the layer is selected/deselected
self.events.select.connect(self._set_highlight)
self.events.deselect.connect(self._set_highlight)
self._colors = get_color_namelist()
# Save the point coordinates
self._data = np.asarray(data)
self.dims.clip = False
# Save the properties
if properties is None:
self._properties = {}
self._property_choices = {}
elif len(data) > 0:
properties = dataframe_to_properties(properties)
self._properties = self._validate_properties(properties)
self._property_choices = {k: np.unique(v) for k, v in properties.items()}
elif len(data) == 0:
self._property_choices = {k: np.asarray(v) for k, v in properties.items()}
empty_properties = {
k: np.empty(0, dtype=v.dtype) for k, v in self._property_choices.items()
}
self._properties = empty_properties
# Save the point style params
self.symbol = symbol
self._n_dimensional = n_dimensional
self.edge_width = edge_width
# The following point properties are for the new points that will
# be added. For any given property, if a list is passed to the
# constructor so each point gets its own value then the default
# value is used when adding new points
if np.isscalar(size):
self._current_size = np.asarray(size)
else:
self._current_size = 10
# Indices of selected points
self._selected_data = set()
self._selected_data_stored = set()
self._selected_data_history = set()
# Indices of selected points within the currently viewed slice
self._selected_view = []
# Index of hovered point
self._value = None
self._value_stored = None
self._mode = Mode.PAN_ZOOM
self._mode_history = self._mode
self._status = self.mode
self._highlight_index = []
self._highlight_box = None
self._drag_start = None
# initialize view data
self._indices_view = []
self._view_size_scale = []
self._drag_box = None
self._drag_box_stored = None
self._is_selecting = False
self._clipboard = {}
with self.block_update_properties():
self.edge_color_property = ""
self.edge_color = edge_color
if edge_color_cycle is None:
edge_color_cycle = deepcopy(DEFAULT_COLOR_CYCLE)
self.edge_color_cycle = edge_color_cycle
self.edge_color_cycle_map = {}
self.edge_colormap = edge_colormap
self._edge_contrast_limits = edge_contrast_limits
self._face_color_property = ""
self.face_color = face_color
if face_color_cycle is None:
face_color_cycle = deepcopy(DEFAULT_COLOR_CYCLE)
self.face_color_cycle = face_color_cycle
self.face_color_cycle_map = {}
self.face_colormap = face_colormap
self._face_contrast_limits = face_contrast_limits
self.refresh_colors()
self.size = size
# set the current_* properties
if len(data) > 0:
self._current_edge_color = self.edge_color[-1]
self._current_face_color = self.face_color[-1]
self.current_properties = {
k: np.asarray([v[-1]]) for k, v in self.properties.items()
}
elif len(data) == 0 and self.properties:
self.current_properties = {
k: np.asarray([v[0]]) for k, v in self._property_choices.items()
}
if self._edge_color_mode == ColorMode.DIRECT:
self._current_edge_color = transform_color_with_defaults(
num_entries=1,
colors=edge_color,
elem_name="edge_color",
default="white",
)
elif self._edge_color_mode == ColorMode.CYCLE:
curr_edge_color = transform_color(next(self.edge_color_cycle))
prop_value = self._property_choices[self._edge_color_property][0]
self.edge_color_cycle_map[prop_value] = curr_edge_color
self._current_edge_color = curr_edge_color
elif self._edge_color_mode == ColorMode.COLORMAP:
prop_value = self._property_choices[self._edge_color_property][0]
curr_edge_color, _ = map_property(
prop=prop_value,
colormap=self.edge_colormap[1],
contrast_limits=self._edge_contrast_limits,
)
self._current_edge_color = curr_edge_color
if self._face_color_mode == ColorMode.DIRECT:
self._current_face_color = transform_color_with_defaults(
num_entries=1,
colors=face_color,
elem_name="face_color",
default="white",
)
elif self._face_color_mode == ColorMode.CYCLE:
curr_face_color = transform_color(next(self.face_color_cycle))
prop_value = self._property_choices[self._face_color_property][0]
self.face_color_cycle_map[prop_value] = curr_face_color
self._current_face_color = curr_face_color
elif self._face_color_mode == ColorMode.COLORMAP:
prop_value = self._property_choices[self._face_color_property][0]
curr_face_color, _ = map_property(
prop=prop_value,
colormap=self.face_colormap[1],
contrast_limits=self._face_contrast_limits,
)
self._current_face_color = curr_face_color
else:
self._current_edge_color = self.edge_color[-1]
self._current_face_color = self.face_color[-1]
self.current_properties = {}
# Trigger generation of view slice and thumbnail
self._update_dims()
|
def __init__(
self,
data=None,
*,
properties=None,
symbol="o",
size=10,
edge_width=1,
edge_color="black",
edge_color_cycle=None,
edge_colormap="viridis",
edge_contrast_limits=None,
face_color="white",
face_color_cycle=None,
face_colormap="viridis",
face_contrast_limits=None,
n_dimensional=False,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=1,
blending="translucent",
visible=True,
):
if data is None:
data = np.empty((0, 2))
else:
data = np.atleast_2d(data)
ndim = data.shape[1]
super().__init__(
data,
ndim,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
opacity=opacity,
blending=blending,
visible=visible,
)
self.events.add(
mode=Event,
size=Event,
edge_width=Event,
face_color=Event,
current_face_color=Event,
edge_color=Event,
current_edge_color=Event,
symbol=Event,
n_dimensional=Event,
highlight=Event,
)
# update highlights when the layer is selected/deselected
self.events.select.connect(self._set_highlight)
self.events.deselect.connect(self._set_highlight)
self._colors = get_color_namelist()
# Save the point coordinates
self._data = np.asarray(data)
self.dims.clip = False
# Save the properties
if properties is None:
properties = {}
elif not isinstance(properties, dict):
properties = dataframe_to_properties(properties)
self._properties = self._validate_properties(properties)
# Save the point style params
self.symbol = symbol
self._n_dimensional = n_dimensional
self.edge_width = edge_width
# The following point properties are for the new points that will
# be added. For any given property, if a list is passed to the
# constructor so each point gets its own value then the default
# value is used when adding new points
if np.isscalar(size):
self._current_size = np.asarray(size)
else:
self._current_size = 10
# Indices of selected points
self._selected_data = set()
self._selected_data_stored = set()
self._selected_data_history = set()
# Indices of selected points within the currently viewed slice
self._selected_view = []
# Index of hovered point
self._value = None
self._value_stored = None
self._mode = Mode.PAN_ZOOM
self._mode_history = self._mode
self._status = self.mode
self._highlight_index = []
self._highlight_box = None
self._drag_start = None
# initialize view data
self._indices_view = []
self._view_size_scale = []
self._drag_box = None
self._drag_box_stored = None
self._is_selecting = False
self._clipboard = {}
with self.block_update_properties():
self.edge_color_property = ""
self.edge_color = edge_color
if edge_color_cycle is None:
edge_color_cycle = DEFAULT_COLOR_CYCLE
self.edge_color_cycle = edge_color_cycle
self.edge_color_cycle_map = {}
self.edge_colormap = edge_colormap
self._edge_contrast_limits = edge_contrast_limits
self._face_color_property = ""
self.face_color = face_color
if face_color_cycle is None:
face_color_cycle = DEFAULT_COLOR_CYCLE
self.face_color_cycle = face_color_cycle
self.face_color_cycle_map = {}
self.face_colormap = face_colormap
self._face_contrast_limits = face_contrast_limits
self.refresh_colors()
# set the current_* properties
self._current_edge_color = self.edge_color[-1]
self._current_face_color = self.face_color[-1]
self.size = size
self.current_properties = {
k: np.asarray([v[-1]]) for k, v in self.properties.items()
}
# Trigger generation of view slice and thumbnail
self._update_dims()
|
https://github.com/napari/napari/issues/930
|
(base) czirwc1macos2701:image-demos nsofroniew$ python test-examples/points_properties_empty.py
Traceback (most recent call last):
File "test-examples/points_properties_empty.py", line 29, in <module>
name='points'
File "/Users/nsofroniew/GitHub/napari/napari/components/add_layers_mixin.py", line 366, in add_points
visible=visible,
File "/Users/nsofroniew/GitHub/napari/napari/layers/points/points.py", line 336, in __init__
self._current_face_color = self.face_color[-1]
IndexError: index -1 is out of bounds for axis 0 with size 0
|
IndexError
|
def data(self, data: np.ndarray):
cur_npoints = len(self._data)
self._data = data
# Adjust the size array when the number of points has changed
if len(data) < cur_npoints:
# If there are now fewer points, remove the size and colors of the
# extra ones
with self.events.set_data.blocker():
self._edge_color = self.edge_color[: len(data)]
self._face_color = self.face_color[: len(data)]
self._size = self._size[: len(data)]
for k in self.properties:
self.properties[k] = self.properties[k][: len(data)]
elif len(data) > cur_npoints:
# If there are now more points, add the size and colors of the
# new ones
with self.events.set_data.blocker():
adding = len(data) - cur_npoints
if len(self._size) > 0:
new_size = copy(self._size[-1])
for i in self.dims.displayed:
new_size[i] = self.current_size
else:
# Add the default size, with a value for each dimension
new_size = np.repeat(self.current_size, self._size.shape[1])
size = np.repeat([new_size], adding, axis=0)
for k in self.properties:
new_property = np.repeat(self.current_properties[k], adding, axis=0)
self.properties[k] = np.concatenate(
(self.properties[k], new_property), axis=0
)
# add new edge colors
if self._edge_color_mode == ColorMode.DIRECT:
new_edge_colors = np.tile(self._current_edge_color, (adding, 1))
elif self._edge_color_mode == ColorMode.CYCLE:
edge_color_property = self.current_properties[
self._edge_color_property
][0]
# check if the new edge color property is in the cycle map
# and add it if it is not
edge_color_cycle_keys = [*self.edge_color_cycle_map]
if edge_color_property not in edge_color_cycle_keys:
self.edge_color_cycle_map[edge_color_property] = next(
self.edge_color_cycle
)
new_edge_colors = np.tile(
self.edge_color_cycle_map[edge_color_property],
(adding, 1),
)
elif self._edge_color_mode == ColorMode.COLORMAP:
edge_color_property_value = self.current_properties[
self._edge_color_property
][0]
ec, _ = map_property(
prop=edge_color_property_value,
colormap=self.edge_colormap[1],
contrast_limits=self._edge_contrast_limits,
)
new_edge_colors = np.tile(ec, (adding, 1))
self._edge_color = np.vstack((self.edge_color, new_edge_colors))
# add new face colors
if self._face_color_mode == ColorMode.DIRECT:
new_face_colors = np.tile(self._current_face_color, (adding, 1))
elif self._face_color_mode == ColorMode.CYCLE:
face_color_property = self.current_properties[
self._face_color_property
][0]
# check if the new edge color property is in the cycle map
# and add it if it is not
face_color_cycle_keys = [*self.face_color_cycle_map]
if face_color_property not in face_color_cycle_keys:
self.face_color_cycle_map[face_color_property] = next(
self.face_color_cycle
)
new_face_colors = np.tile(
self.face_color_cycle_map[face_color_property],
(adding, 1),
)
elif self._face_color_mode == ColorMode.COLORMAP:
face_color_property_value = self.current_properties[
self._face_color_property
][0]
fc, _ = map_property(
prop=face_color_property_value,
colormap=self.face_colormap[1],
contrast_limits=self._face_contrast_limits,
)
new_face_colors = np.tile(fc, (adding, 1))
self._face_color = np.vstack((self.face_color, new_face_colors))
self.size = np.concatenate((self._size, size), axis=0)
self.selected_data = set(np.arange(cur_npoints, len(data)))
self._update_dims()
self.events.data()
|
def data(self, data: np.ndarray):
cur_npoints = len(self._data)
self._data = data
# Adjust the size array when the number of points has changed
if len(data) < cur_npoints:
# If there are now fewer points, remove the size and colors of the
# extra ones
with self.events.set_data.blocker():
self._edge_color = self.edge_color[: len(data)]
self._face_color = self.face_color[: len(data)]
self._size = self._size[: len(data)]
for k in self.properties:
self.properties[k] = self.properties[k][: len(data)]
elif len(data) > cur_npoints:
# If there are now more points, add the size and colors of the
# new ones
with self.events.set_data.blocker():
adding = len(data) - cur_npoints
if len(self._size) > 0:
new_size = copy(self._size[-1])
for i in self.dims.displayed:
new_size[i] = self.current_size
else:
# Add the default size, with a value for each dimension
new_size = np.repeat(self.current_size, self._size.shape[1])
size = np.repeat([new_size], adding, axis=0)
for k in self.properties:
new_property = np.repeat(self.current_properties[k], adding, axis=0)
self.properties[k] = np.concatenate(
(self.properties[k], new_property), axis=0
)
# add new edge colors
if self._edge_color_mode == ColorMode.DIRECT:
new_edge_colors = np.tile(self._current_edge_color, (adding, 1))
elif self._edge_color_mode == ColorMode.CYCLE:
edge_color_property = self.current_properties[
self._edge_color_property
][0]
new_edge_colors = np.tile(
self.edge_color_cycle_map[edge_color_property],
(adding, 1),
)
elif self._edge_color_mode == ColorMode.COLORMAP:
edge_color_property_value = self.current_properties[
self._edge_color_property
][0]
ec, _ = map_property(
prop=edge_color_property_value,
colormap=self.edge_colormap[1],
contrast_limits=self._edge_contrast_limits,
)
new_edge_colors = np.tile(ec, (adding, 1))
self._edge_color = np.vstack((self.edge_color, new_edge_colors))
# add new face colors
if self._face_color_mode == ColorMode.DIRECT:
new_face_colors = np.tile(self._current_face_color, (adding, 1))
elif self._face_color_mode == ColorMode.CYCLE:
face_color_property_value = self.current_properties[
self._face_color_property
][0]
new_face_colors = np.tile(
self.face_color_cycle_map[face_color_property_value],
(adding, 1),
)
elif self._face_color_mode == ColorMode.COLORMAP:
face_color_property_value = self.current_properties[
self._face_color_property
][0]
fc, _ = map_property(
prop=face_color_property_value,
colormap=self.face_colormap[1],
contrast_limits=self._face_contrast_limits,
)
new_face_colors = np.tile(fc, (adding, 1))
self._face_color = np.vstack((self.face_color, new_face_colors))
self.size = np.concatenate((self._size, size), axis=0)
self.selected_data = set(np.arange(cur_npoints, len(data)))
self._update_dims()
self.events.data()
|
https://github.com/napari/napari/issues/930
|
(base) czirwc1macos2701:image-demos nsofroniew$ python test-examples/points_properties_empty.py
Traceback (most recent call last):
File "test-examples/points_properties_empty.py", line 29, in <module>
name='points'
File "/Users/nsofroniew/GitHub/napari/napari/components/add_layers_mixin.py", line 366, in add_points
visible=visible,
File "/Users/nsofroniew/GitHub/napari/napari/layers/points/points.py", line 336, in __init__
self._current_face_color = self.face_color[-1]
IndexError: index -1 is out of bounds for axis 0 with size 0
|
IndexError
|
def properties(self, properties: Dict[str, np.ndarray]):
if not isinstance(properties, dict):
properties = dataframe_to_properties(properties)
self._properties = self._validate_properties(properties)
if self._face_color_property and (
self._face_color_property not in self._properties
):
self._face_color_property = ""
warnings.warn("property used for face_color dropped")
if self._edge_color_property and (
self._edge_color_property not in self._properties
):
self._edge_color_property = ""
warnings.warn("property used for edge_color dropped")
|
def properties(self, properties: Dict[str, np.ndarray]):
if not isinstance(properties, dict):
properties = dataframe_to_properties(properties)
self._properties = self._validate_properties(properties)
if self._face_color_property and (
self._face_color_property not in self._properties
):
self._face_color_property = ""
warnings.warn("property used for face_color dropped")
|
https://github.com/napari/napari/issues/930
|
(base) czirwc1macos2701:image-demos nsofroniew$ python test-examples/points_properties_empty.py
Traceback (most recent call last):
File "test-examples/points_properties_empty.py", line 29, in <module>
name='points'
File "/Users/nsofroniew/GitHub/napari/napari/components/add_layers_mixin.py", line 366, in add_points
visible=visible,
File "/Users/nsofroniew/GitHub/napari/napari/layers/points/points.py", line 336, in __init__
self._current_face_color = self.face_color[-1]
IndexError: index -1 is out of bounds for axis 0 with size 0
|
IndexError
|
def _validate_properties(self, properties: Dict[str, np.ndarray]):
"""Validates the type and size of the properties"""
for k, v in properties.items():
if len(v) != len(self.data):
raise ValueError("the number of properties must equal the number of points")
# ensure the property values are a numpy array
if type(v) != np.ndarray:
properties[k] = np.asarray(v)
return properties
|
def _validate_properties(self, properties: Dict[str, np.ndarray]):
"""Validates the type and size of the properties"""
for v in properties.values():
if len(v) != len(self.data):
raise ValueError("the number of properties must equal the number of points")
return properties
|
https://github.com/napari/napari/issues/930
|
(base) czirwc1macos2701:image-demos nsofroniew$ python test-examples/points_properties_empty.py
Traceback (most recent call last):
File "test-examples/points_properties_empty.py", line 29, in <module>
name='points'
File "/Users/nsofroniew/GitHub/napari/napari/components/add_layers_mixin.py", line 366, in add_points
visible=visible,
File "/Users/nsofroniew/GitHub/napari/napari/layers/points/points.py", line 336, in __init__
self._current_face_color = self.face_color[-1]
IndexError: index -1 is out of bounds for axis 0 with size 0
|
IndexError
|
def refresh_colors(self, update_color_mapping: bool = False):
"""Calculate and update face and edge colors if using a cycle or color map
Parameters
----------
update_color_mapping : bool
If set to True, the function will recalculate the color cycle map
or colormap (whichever is being used). If set to False, the function
will use the current color cycle map or color map. For example, if you
are adding/modifying points and want them to be colored with the same
mapping as the other points (i.e., the new points shouldn't affect
the color cycle map or colormap), set update_color_mapping=False.
Default value is False.
"""
if self._update_properties:
if self._face_color_mode == ColorMode.CYCLE:
face_color_properties = self.properties[self._face_color_property]
if update_color_mapping:
self.face_color_cycle_map = {
k: c
for k, c in zip(
np.unique(face_color_properties),
self.face_color_cycle,
)
}
else:
# add properties if they are not in the colormap
# and update_color_mapping==False
face_color_cycle_keys = [*self.face_color_cycle_map]
props_in_map = np.in1d(face_color_properties, face_color_cycle_keys)
if not np.all(props_in_map):
props_to_add = np.unique(
face_color_properties[np.logical_not(props_in_map)]
)
for prop in props_to_add:
self.face_color_cycle_map[prop] = next(self.face_color_cycle)
face_colors = np.array(
[self.face_color_cycle_map[x] for x in face_color_properties]
)
if len(face_colors) == 0:
face_colors = np.empty((0, 4))
self._face_color = face_colors
self.events.face_color()
elif self._face_color_mode == ColorMode.COLORMAP:
face_color_properties = self.properties[self._face_color_property]
if len(face_color_properties) > 0:
if update_color_mapping or self.face_contrast_limits is None:
face_colors, contrast_limits = map_property(
prop=face_color_properties,
colormap=self.face_colormap[1],
)
self.face_contrast_limits = contrast_limits
else:
face_colors, _ = map_property(
prop=face_color_properties,
colormap=self.face_colormap[1],
contrast_limits=self.face_contrast_limits,
)
else:
face_colors = np.empty((0, 4))
self._face_color = face_colors
if self._edge_color_mode == ColorMode.CYCLE:
edge_color_properties = self.properties[self._edge_color_property]
if update_color_mapping:
self.edge_color_cycle_map = {
k: c
for k, c in zip(
np.unique(edge_color_properties),
self.edge_color_cycle,
)
}
else:
# add properties if they are not in the colormap
# and update_color_mapping==False
edge_color_cycle_keys = [*self.edge_color_cycle_map]
props_in_map = np.in1d(edge_color_properties, edge_color_cycle_keys)
if not np.all(props_in_map):
props_to_add = np.unique(
edge_color_properties[np.logical_not(props_in_map)]
)
for prop in props_to_add:
self.edge_color_cycle_map[prop] = next(self.edge_color_cycle)
edge_colors = np.array(
[self.edge_color_cycle_map[x] for x in edge_color_properties]
)
if len(edge_colors) == 0:
edge_colors = np.empty((0, 4))
self._edge_color = edge_colors
elif self._edge_color_mode == ColorMode.COLORMAP:
edge_color_properties = self.properties[self._edge_color_property]
if len(edge_color_properties) > 0:
if update_color_mapping or self.edge_contrast_limits is None:
edge_colors, contrast_limits = map_property(
prop=edge_color_properties,
colormap=self.edge_colormap[1],
)
self.edge_contrast_limits = contrast_limits
else:
edge_colors, _ = map_property(
prop=edge_color_properties,
colormap=self.edge_colormap[1],
contrast_limits=self.edge_contrast_limits,
)
else:
edge_colors = np.empty((0, 4))
self._edge_color = edge_colors
self.events.face_color()
self.events.edge_color()
|
def refresh_colors(self, update_color_mapping: bool = False):
"""Calculate and update face and edge colors if using a cycle or color map
Parameters
----------
update_color_mapping : bool
If set to True, the function will recalculate the color cycle map
or colormap (whichever is being used). If set to False, the function
will use the current color cycle map or color map. For example, if you
are adding/modifying points and want them to be colored with the same
mapping as the other points (i.e., the new points shouldn't affect
the color cycle map or colormap), set update_color_mapping=False.
Default value is True.
"""
if self._update_properties:
if self._face_color_mode == ColorMode.CYCLE:
face_color_properties = self.properties[self._face_color_property]
if update_color_mapping:
self.face_color_cycle_map = {
k: c
for k, c in zip(
np.unique(face_color_properties),
self.face_color_cycle,
)
}
else:
# add properties if they are not in the colormap
# and update_color_mapping==False
face_color_cycle_keys = [*self.face_color_cycle_map]
props_in_map = np.in1d(face_color_properties, face_color_cycle_keys)
if not np.all(props_in_map):
props_to_add = np.unique(
face_color_properties[np.logical_not(props_in_map)]
)
for prop in props_to_add:
self.face_color_cycle_map[prop] = next(self.face_color_cycle)
face_colors = np.array(
[self.face_color_cycle_map[x] for x in face_color_properties]
)
self._face_color = face_colors
self.events.face_color()
elif self._face_color_mode == ColorMode.COLORMAP:
face_color_properties = self.properties[self._face_color_property]
if update_color_mapping or self.face_contrast_limits is None:
face_colors, contrast_limits = map_property(
prop=face_color_properties,
colormap=self.face_colormap[1],
)
self.face_contrast_limits = contrast_limits
else:
face_colors, _ = map_property(
prop=face_color_properties,
colormap=self.face_colormap[1],
contrast_limits=self.face_contrast_limits,
)
self._face_color = face_colors
if self._edge_color_mode == ColorMode.CYCLE:
edge_color_properties = self.properties[self._edge_color_property]
if update_color_mapping:
self.edge_color_cycle_map = {
k: c
for k, c in zip(
np.unique(edge_color_properties),
self.edge_color_cycle,
)
}
else:
# add properties if they are not in the colormap
# and update_color_mapping==False
edge_color_cycle_keys = [*self.edge_color_cycle_map]
props_in_map = np.in1d(edge_color_properties, edge_color_cycle_keys)
if not np.all(props_in_map):
props_to_add = np.unique(
edge_color_properties[np.logical_not(props_in_map)]
)
for prop in props_to_add:
self.edge_color_cycle_map[prop] = next(self.edge_color_cycle)
edge_colors = np.array(
[self.edge_color_cycle_map[x] for x in edge_color_properties]
)
self._edge_color = edge_colors
elif self._edge_color_mode == ColorMode.COLORMAP:
edge_color_properties = self.properties[self._edge_color_property]
if update_color_mapping or self.edge_contrast_limits is None:
edge_colors, contrast_limits = map_property(
prop=edge_color_properties,
colormap=self.edge_colormap[1],
)
self.edge_contrast_limits = contrast_limits
else:
edge_colors, _ = map_property(
prop=edge_color_properties,
colormap=self.edge_colormap[1],
contrast_limits=self.edge_contrast_limits,
)
self._edge_color = edge_colors
self.events.face_color()
self.events.edge_color()
|
https://github.com/napari/napari/issues/930
|
(base) czirwc1macos2701:image-demos nsofroniew$ python test-examples/points_properties_empty.py
Traceback (most recent call last):
File "test-examples/points_properties_empty.py", line 29, in <module>
name='points'
File "/Users/nsofroniew/GitHub/napari/napari/components/add_layers_mixin.py", line 366, in add_points
visible=visible,
File "/Users/nsofroniew/GitHub/napari/napari/layers/points/points.py", line 336, in __init__
self._current_face_color = self.face_color[-1]
IndexError: index -1 is out of bounds for axis 0 with size 0
|
IndexError
|
def selected_data(self, selected_data):
self._selected_data = set(selected_data)
selected = []
for c in self._selected_data:
if c in self._indices_view:
ind = list(self._indices_view).index(c)
selected.append(ind)
self._selected_view = selected
# Update properties based on selected points
if len(self._selected_data) == 0:
self._set_highlight()
return
index = list(self._selected_data)
edge_colors = np.unique(self.edge_color[index], axis=0)
if len(edge_colors) == 1:
edge_color = edge_colors[0]
with self.block_update_properties():
self.current_edge_color = edge_color
face_colors = np.unique(self.face_color[index], axis=0)
if len(face_colors) == 1:
face_color = face_colors[0]
with self.block_update_properties():
self.current_face_color = face_color
size = list(set([self.size[i, self.dims.displayed].mean() for i in index]))
if len(size) == 1:
size = size[0]
with self.block_update_properties():
self.current_size = size
properties = {k: np.unique(v[index], axis=0) for k, v in self.properties.items()}
n_unique_properties = np.array([len(v) for v in properties.values()])
if np.all(n_unique_properties == 1):
with self.block_update_properties():
self.current_properties = properties
self._set_highlight()
|
def selected_data(self, selected_data):
self._selected_data = set(selected_data)
selected = []
for c in self._selected_data:
if c in self._indices_view:
ind = list(self._indices_view).index(c)
selected.append(ind)
self._selected_view = selected
# Update properties based on selected points
if len(self._selected_data) == 0:
self._set_highlight()
return
index = list(self._selected_data)
edge_colors = np.unique(self.edge_color[index], axis=0)
if len(edge_colors) == 1:
edge_color = edge_colors[0]
with self.block_update_properties():
self.current_edge_color = edge_color
face_colors = np.unique(self.face_color[index], axis=0)
if len(face_colors) == 1:
face_color = face_colors[0]
with self.block_update_properties():
self.current_face_color = face_color
size = list(set([self.size[i, self.dims.displayed].mean() for i in index]))
if len(size) == 1:
size = size[0]
with self.block_update_properties():
self.current_size = size
properties = {k: np.unique(v[index], axis=0) for k, v in self.properties.items()}
n_unique_properties = np.array([len(v) for v in properties.values()])
if np.all(n_unique_properties == 1):
self.current_properties = properties
self._set_highlight()
|
https://github.com/napari/napari/issues/930
|
(base) czirwc1macos2701:image-demos nsofroniew$ python test-examples/points_properties_empty.py
Traceback (most recent call last):
File "test-examples/points_properties_empty.py", line 29, in <module>
name='points'
File "/Users/nsofroniew/GitHub/napari/napari/components/add_layers_mixin.py", line 366, in add_points
visible=visible,
File "/Users/nsofroniew/GitHub/napari/napari/layers/points/points.py", line 336, in __init__
self._current_face_color = self.face_color[-1]
IndexError: index -1 is out of bounds for axis 0 with size 0
|
IndexError
|
def __init__(self, qt_viewer, *, show=True):
self.qt_viewer = qt_viewer
self._qt_window = QMainWindow()
self._qt_window.setAttribute(Qt.WA_DeleteOnClose)
self._qt_window.setUnifiedTitleAndToolBarOnMac(True)
self._qt_center = QWidget(self._qt_window)
self._qt_window.setCentralWidget(self._qt_center)
self._qt_window.setWindowTitle(self.qt_viewer.viewer.title)
self._qt_center.setLayout(QHBoxLayout())
self._status_bar = QStatusBar()
self._qt_window.setStatusBar(self._status_bar)
self._add_menubar()
self._add_file_menu()
self._add_view_menu()
self._add_window_menu()
self._add_help_menu()
self._status_bar.showMessage("Ready")
self._help = QLabel("")
self._status_bar.addPermanentWidget(self._help)
self._qt_center.layout().addWidget(self.qt_viewer)
self._qt_center.layout().setContentsMargins(4, 0, 4, 0)
self._update_palette()
self._add_viewer_dock_widget(self.qt_viewer.dockConsole)
self._add_viewer_dock_widget(self.qt_viewer.dockLayerControls)
self._add_viewer_dock_widget(self.qt_viewer.dockLayerList)
self.qt_viewer.viewer.events.status.connect(self._status_changed)
self.qt_viewer.viewer.events.help.connect(self._help_changed)
self.qt_viewer.viewer.events.title.connect(self._title_changed)
self.qt_viewer.viewer.events.palette.connect(self._update_palette)
if show:
self.show()
|
def __init__(self, qt_viewer, *, show=True):
self.qt_viewer = qt_viewer
self._qt_window = QMainWindow()
self._qt_window.setAttribute(Qt.WA_DeleteOnClose)
self._qt_window.setUnifiedTitleAndToolBarOnMac(True)
self._qt_center = QWidget(self._qt_window)
self._qt_window.setCentralWidget(self._qt_center)
self._qt_window.setWindowTitle(self.qt_viewer.viewer.title)
self._qt_center.setLayout(QHBoxLayout())
self._status_bar = QStatusBar()
self._qt_window.setStatusBar(self._status_bar)
self._add_menubar()
self._add_file_menu()
self._add_view_menu()
self._add_window_menu()
self._add_help_menu()
self._status_bar.showMessage("Ready")
self._help = QLabel("")
self._status_bar.addPermanentWidget(self._help)
self._qt_center.layout().addWidget(self.qt_viewer)
self._qt_center.layout().setContentsMargins(4, 0, 4, 0)
self._update_palette(qt_viewer.viewer.palette)
self._add_viewer_dock_widget(self.qt_viewer.dockConsole)
self._add_viewer_dock_widget(self.qt_viewer.dockLayerControls)
self._add_viewer_dock_widget(self.qt_viewer.dockLayerList)
self.qt_viewer.viewer.events.status.connect(self._status_changed)
self.qt_viewer.viewer.events.help.connect(self._help_changed)
self.qt_viewer.viewer.events.title.connect(self._title_changed)
self.qt_viewer.viewer.events.palette.connect(
lambda event: self._update_palette(event.palette)
)
if show:
self.show()
|
https://github.com/napari/napari/issues/1079
|
Traceback (most recent call last):
File "/Users/gbokota/Documents/projekty/napari/napari/components/viewer_model.py", line 418, in _toggle_theme
self.theme = theme_names[(cur_theme + 1) % len(theme_names)]
File "/Users/gbokota/Documents/projekty/napari/napari/components/viewer_model.py", line 130, in theme
self.palette = self.themes[theme]
File "/Users/gbokota/Documents/projekty/napari/napari/components/viewer_model.py", line 114, in palette
self.events.palette()
File "/Users/gbokota/Documents/projekty/napari/napari/utils/event.py", line 508, in __call__
self._invoke_callback(cb, event)
File "/Users/gbokota/Documents/projekty/napari/napari/utils/event.py", line 529, in _invoke_callback
cb_event=(cb, event),
File "/Users/gbokota/Documents/projekty/napari/napari/utils/event.py", line 523, in _invoke_callback
cb(event)
File "/Users/gbokota/Documents/projekty/napari/napari/_qt/qt_main_window.py", line 99, in <lambda>
lambda event: self._update_palette(event.palette)
AttributeError: 'Event' object has no attribute 'palette'
|
AttributeError
|
def _update_palette(self, event=None):
"""Update widget color palette."""
# set window styles which don't use the primary stylesheet
# FIXME: this is a problem with the stylesheet not using properties
palette = self.qt_viewer.viewer.palette
self._status_bar.setStyleSheet(
template(
"QStatusBar { background: {{ background }}; color: {{ text }}; }",
**palette,
)
)
self._qt_center.setStyleSheet(
template("QWidget { background: {{ background }}; }", **palette)
)
self._qt_window.setStyleSheet(template(self.raw_stylesheet, **palette))
|
def _update_palette(self, palette):
"""Update widget color palette.
Parameters
----------
palette : qtpy.QtGui.QPalette
Color palette for each widget state (Active, Disabled, Inactive).
"""
# set window styles which don't use the primary stylesheet
# FIXME: this is a problem with the stylesheet not using properties
self._status_bar.setStyleSheet(
template(
"QStatusBar { background: {{ background }}; color: {{ text }}; }",
**palette,
)
)
self._qt_center.setStyleSheet(
template("QWidget { background: {{ background }}; }", **palette)
)
self._qt_window.setStyleSheet(template(self.raw_stylesheet, **palette))
|
https://github.com/napari/napari/issues/1079
|
Traceback (most recent call last):
File "/Users/gbokota/Documents/projekty/napari/napari/components/viewer_model.py", line 418, in _toggle_theme
self.theme = theme_names[(cur_theme + 1) % len(theme_names)]
File "/Users/gbokota/Documents/projekty/napari/napari/components/viewer_model.py", line 130, in theme
self.palette = self.themes[theme]
File "/Users/gbokota/Documents/projekty/napari/napari/components/viewer_model.py", line 114, in palette
self.events.palette()
File "/Users/gbokota/Documents/projekty/napari/napari/utils/event.py", line 508, in __call__
self._invoke_callback(cb, event)
File "/Users/gbokota/Documents/projekty/napari/napari/utils/event.py", line 529, in _invoke_callback
cb_event=(cb, event),
File "/Users/gbokota/Documents/projekty/napari/napari/utils/event.py", line 523, in _invoke_callback
cb(event)
File "/Users/gbokota/Documents/projekty/napari/napari/_qt/qt_main_window.py", line 99, in <lambda>
lambda event: self._update_palette(event.palette)
AttributeError: 'Event' object has no attribute 'palette'
|
AttributeError
|
def __init__(self, viewer):
super().__init__()
self.setAttribute(Qt.WA_DeleteOnClose)
self.pool = QThreadPool()
QCoreApplication.setAttribute(Qt.AA_UseStyleSheetPropagationInWidgetStyles, True)
self.viewer = viewer
self.dims = QtDims(self.viewer.dims)
self.controls = QtControls(self.viewer)
self.layers = QtLayerList(self.viewer.layers)
self.layerButtons = QtLayerButtons(self.viewer)
self.viewerButtons = QtViewerButtons(self.viewer)
self._console = None
layerList = QWidget()
layerList.setObjectName("layerList")
layerListLayout = QVBoxLayout()
layerListLayout.addWidget(self.layerButtons)
layerListLayout.addWidget(self.layers)
layerListLayout.addWidget(self.viewerButtons)
layerListLayout.setContentsMargins(8, 4, 8, 6)
layerList.setLayout(layerListLayout)
self.dockLayerList = QtViewerDockWidget(
self,
layerList,
name="layer list",
area="left",
allowed_areas=["left", "right"],
)
self.dockLayerControls = QtViewerDockWidget(
self,
self.controls,
name="layer controls",
area="left",
allowed_areas=["left", "right"],
)
self.dockConsole = QtViewerDockWidget(
self,
QWidget(),
name="console",
area="bottom",
allowed_areas=["top", "bottom"],
shortcut="Ctrl+Shift+C",
)
self.dockConsole.setVisible(False)
# because the console is loaded lazily in the @getter, this line just
# gets (or creates) the console when the dock console is made visible.
self.dockConsole.visibilityChanged.connect(
lambda visible: self.console if visible else None
)
self.dockLayerControls.visibilityChanged.connect(self._constrain_width)
self.dockLayerList.setMaximumWidth(258)
self.dockLayerList.setMinimumWidth(258)
# This dictionary holds the corresponding vispy visual for each layer
self.layer_to_visual = {}
self.viewerButtons.consoleButton.clicked.connect(self.toggle_console_visibility)
self.canvas = SceneCanvas(keys=None, vsync=True, parent=self)
self.canvas.events.ignore_callback_errors = False
self.canvas.events.draw.connect(self.dims.enable_play)
self.canvas.native.setMinimumSize(QSize(200, 200))
self.canvas.context.set_depth_func("lequal")
self.canvas.connect(self.on_mouse_move)
self.canvas.connect(self.on_mouse_press)
self.canvas.connect(self.on_mouse_release)
self.canvas.connect(self.on_key_press)
self.canvas.connect(self.on_key_release)
self.canvas.connect(self.on_draw)
self.view = self.canvas.central_widget.add_view()
self._update_camera()
main_widget = QWidget()
main_layout = QVBoxLayout()
main_layout.setContentsMargins(10, 22, 10, 2)
main_layout.addWidget(self.canvas.native)
main_layout.addWidget(self.dims)
main_layout.setSpacing(10)
main_widget.setLayout(main_layout)
self.setOrientation(Qt.Vertical)
self.addWidget(main_widget)
self._last_visited_dir = str(Path.home())
self._cursors = {
"cross": Qt.CrossCursor,
"forbidden": Qt.ForbiddenCursor,
"pointing": Qt.PointingHandCursor,
"standard": QCursor(),
}
self._update_palette()
self.viewer.events.interactive.connect(self._on_interactive)
self.viewer.events.cursor.connect(self._on_cursor)
self.viewer.events.reset_view.connect(self._on_reset_view)
self.viewer.events.palette.connect(self._update_palette)
self.viewer.layers.events.reordered.connect(self._reorder_layers)
self.viewer.layers.events.added.connect(self._add_layer)
self.viewer.layers.events.removed.connect(self._remove_layer)
self.viewer.dims.events.camera.connect(lambda event: self._update_camera())
# stop any animations whenever the layers change
self.viewer.events.layers_change.connect(lambda x: self.dims.stop())
self.setAcceptDrops(True)
|
def __init__(self, viewer):
super().__init__()
self.setAttribute(Qt.WA_DeleteOnClose)
self.pool = QThreadPool()
QCoreApplication.setAttribute(Qt.AA_UseStyleSheetPropagationInWidgetStyles, True)
self.viewer = viewer
self.dims = QtDims(self.viewer.dims)
self.controls = QtControls(self.viewer)
self.layers = QtLayerList(self.viewer.layers)
self.layerButtons = QtLayerButtons(self.viewer)
self.viewerButtons = QtViewerButtons(self.viewer)
self._console = None
layerList = QWidget()
layerList.setObjectName("layerList")
layerListLayout = QVBoxLayout()
layerListLayout.addWidget(self.layerButtons)
layerListLayout.addWidget(self.layers)
layerListLayout.addWidget(self.viewerButtons)
layerListLayout.setContentsMargins(8, 4, 8, 6)
layerList.setLayout(layerListLayout)
self.dockLayerList = QtViewerDockWidget(
self,
layerList,
name="layer list",
area="left",
allowed_areas=["left", "right"],
)
self.dockLayerControls = QtViewerDockWidget(
self,
self.controls,
name="layer controls",
area="left",
allowed_areas=["left", "right"],
)
self.dockConsole = QtViewerDockWidget(
self,
QWidget(),
name="console",
area="bottom",
allowed_areas=["top", "bottom"],
shortcut="Ctrl+Shift+C",
)
self.dockConsole.setVisible(False)
self.dockLayerControls.visibilityChanged.connect(self._constrain_width)
self.dockLayerList.setMaximumWidth(258)
self.dockLayerList.setMinimumWidth(258)
# This dictionary holds the corresponding vispy visual for each layer
self.layer_to_visual = {}
self.viewerButtons.consoleButton.clicked.connect(self.toggle_console_visibility)
self.canvas = SceneCanvas(keys=None, vsync=True, parent=self)
self.canvas.events.ignore_callback_errors = False
self.canvas.events.draw.connect(self.dims.enable_play)
self.canvas.native.setMinimumSize(QSize(200, 200))
self.canvas.context.set_depth_func("lequal")
self.canvas.connect(self.on_mouse_move)
self.canvas.connect(self.on_mouse_press)
self.canvas.connect(self.on_mouse_release)
self.canvas.connect(self.on_key_press)
self.canvas.connect(self.on_key_release)
self.canvas.connect(self.on_draw)
self.view = self.canvas.central_widget.add_view()
self._update_camera()
main_widget = QWidget()
main_layout = QVBoxLayout()
main_layout.setContentsMargins(10, 22, 10, 2)
main_layout.addWidget(self.canvas.native)
main_layout.addWidget(self.dims)
main_layout.setSpacing(10)
main_widget.setLayout(main_layout)
self.setOrientation(Qt.Vertical)
self.addWidget(main_widget)
self._last_visited_dir = str(Path.home())
self._cursors = {
"cross": Qt.CrossCursor,
"forbidden": Qt.ForbiddenCursor,
"pointing": Qt.PointingHandCursor,
"standard": QCursor(),
}
self._update_palette(None)
self.viewer.events.interactive.connect(self._on_interactive)
self.viewer.events.cursor.connect(self._on_cursor)
self.viewer.events.reset_view.connect(self._on_reset_view)
self.viewer.events.palette.connect(self._update_palette)
self.viewer.layers.events.reordered.connect(self._reorder_layers)
self.viewer.layers.events.added.connect(self._add_layer)
self.viewer.layers.events.removed.connect(self._remove_layer)
self.viewer.dims.events.camera.connect(lambda event: self._update_camera())
# stop any animations whenever the layers change
self.viewer.events.layers_change.connect(lambda x: self.dims.stop())
self.setAcceptDrops(True)
|
https://github.com/napari/napari/issues/1079
|
Traceback (most recent call last):
File "/Users/gbokota/Documents/projekty/napari/napari/components/viewer_model.py", line 418, in _toggle_theme
self.theme = theme_names[(cur_theme + 1) % len(theme_names)]
File "/Users/gbokota/Documents/projekty/napari/napari/components/viewer_model.py", line 130, in theme
self.palette = self.themes[theme]
File "/Users/gbokota/Documents/projekty/napari/napari/components/viewer_model.py", line 114, in palette
self.events.palette()
File "/Users/gbokota/Documents/projekty/napari/napari/utils/event.py", line 508, in __call__
self._invoke_callback(cb, event)
File "/Users/gbokota/Documents/projekty/napari/napari/utils/event.py", line 529, in _invoke_callback
cb_event=(cb, event),
File "/Users/gbokota/Documents/projekty/napari/napari/utils/event.py", line 523, in _invoke_callback
cb(event)
File "/Users/gbokota/Documents/projekty/napari/napari/_qt/qt_main_window.py", line 99, in <lambda>
lambda event: self._update_palette(event.palette)
AttributeError: 'Event' object has no attribute 'palette'
|
AttributeError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.