after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def login_check():
users = db.mgmt_users
# validate username and password
username = request.form.get("username")
# get salt
if users.find({"username": username}).count() > 0:
if users.find({"username": username, "salt": {"$exists": True}}).count() > 0:
salt = (list(users.find({"username": username}))[0])["salt"]
else:
return render_template("login.html", status=["outdated_database", "error"])
# catch empyt
else:
salt = bytearray()
password = hashlib.sha256(
salt + bytes(request.form.get("password"), "utf-8")
).hexdigest()
person = User.get(username)
if person and person.password == password:
login_user(person)
return render_template("admin.html", status=["logged_in", "success"])
else:
return render_template("login.html", status=["wrong_combination", "warning"])
|
def login_check():
# validate username and password
username = request.form.get("username")
password = hashlib.sha256(bytes(request.form.get("password"), "utf-8")).hexdigest()
person = User.get(username)
if person and person.password == password:
login_user(person)
return render_template("admin.html", status=["logged_in", "success"])
else:
return render_template("login.html", status=["wrong_combination", "error"])
|
https://github.com/cve-search/cve-search/issues/34
|
Starting vendor
Traceback (most recent call last): ] 0/1440
File ".../cve-search/db_mgmt_vendorstatements.py", line 83, in <module>
bulk.find({'id': statement['id']}).upsert().update({'id': statement['id']}, {"$set":{'statement': statement['statement'], 'id': statement['id'], 'organization': statement['organization'], 'contributor': statement['contributor'], 'lastmodified': statement['lastmodified']}})
TypeError: update() takes 2 positional arguments but 3 were given
|
TypeError
|
def importList(self, importFile):
count = 0
# read each line from the import file and regex them to a cpe format
try:
for line in importFile:
if self.insert(line):
count += 1
if self.args.v:
print("{:d} products added to the list".format(count))
except IOError:
print("Could not open the file")
sys.exit()
|
def importList(self, importFile):
count = 0
# read each line from the import file and regex them to a cpe format
try:
for line in open(importFile):
if self.insert(line):
count += 1
if self.args.v:
print("{:d} products added to the list".format(count))
except IOError:
print("Could not open the file")
sys.exit()
|
https://github.com/cve-search/cve-search/issues/34
|
Starting vendor
Traceback (most recent call last): ] 0/1440
File ".../cve-search/db_mgmt_vendorstatements.py", line 83, in <module>
bulk.find({'id': statement['id']}).upsert().update({'id': statement['id']}, {"$set":{'statement': statement['statement'], 'id': statement['id'], 'organization': statement['organization'], 'contributor': statement['contributor'], 'lastmodified': statement['lastmodified']}})
TypeError: update() takes 2 positional arguments but 3 were given
|
TypeError
|
def process(self):
if self.args.d:
# drop the list
self.dropCollection()
elif self.args.i:
# get import file
textfile = self.args.i
# check if the collection is empty
count = self.countItems()
if count > 0 and self.args.f is False:
# not empty and not forced to drop
print("list already populated")
else:
# drop collection and repopulate it
self.dropCollection()
self.importList(open(textfile))
elif self.args.e:
# get export file
textfile = self.args.e
self.exportList(textfile)
elif self.args.a or self.args.A:
# get list of cpe's to add
if self.args.a:
cpeList = self.args.a
else:
cpeList = [x for x in open(self.args.A[0])]
# add each item from the list
count = 0
for cpeID in cpeList:
if self.insert(cpeID):
count += 1
if self.args.v:
print("{:d} products added to the list".format(count))
elif self.args.r or self.args.R:
# get list of cpe's to remove
if self.args.r:
cpeList = self.args.r
else:
cpeList = [x for x in open(self.args.R[0])]
# remove each item from the list
count = 0
for cpeID in cpeList:
amount = self.remove(cpeID)
count += amount
if self.args.v:
print("{:d} products removed from the list".format(count))
|
def process(self):
if self.args.d:
# drop the list
self.dropCollection()
elif self.args.i:
# get import file
textfile = self.args.i
# check if the collection is empty
count = self.countItems()
if count > 0 and self.args.f is False:
# not empty and not forced to drop
print("list already populated")
else:
# drop collection and repopulate it
self.dropCollection()
self.importList(textfile)
elif self.args.e:
# get export file
textfile = self.args.e
self.exportList(textfile)
elif self.args.a or self.args.A:
# get list of cpe's to add
if self.args.a:
cpeList = self.args.a
else:
cpeList = [x for x in open(self.args.A[0])]
# add each item from the list
count = 0
for cpeID in cpeList:
if self.insert(cpeID):
count += 1
if self.args.v:
print("{:d} products added to the list".format(count))
elif self.args.r or self.args.R:
# get list of cpe's to remove
if self.args.r:
cpeList = self.args.r
else:
cpeList = [x for x in open(self.args.R[0])]
# remove each item from the list
count = 0
for cpeID in cpeList:
amount = self.remove(cpeID)
count += amount
if self.args.v:
print("{:d} products removed from the list".format(count))
|
https://github.com/cve-search/cve-search/issues/34
|
Starting vendor
Traceback (most recent call last): ] 0/1440
File ".../cve-search/db_mgmt_vendorstatements.py", line 83, in <module>
bulk.find({'id': statement['id']}).upsert().update({'id': statement['id']}, {"$set":{'statement': statement['statement'], 'id': statement['id'], 'organization': statement['organization'], 'contributor': statement['contributor'], 'lastmodified': statement['lastmodified']}})
TypeError: update() takes 2 positional arguments but 3 were given
|
TypeError
|
def whitelistImport(force=None, path=None):
file = request.files["file"]
force = request.form.get("force")
count = countWhitelist()
if (count == 0) | (not count) | (force == "f"):
dropWhitelist()
importWhitelist(TextIOWrapper(file.stream))
status = ["wl_imported", "success"]
else:
status = ["wl_already_filled", "info"]
return render_template("admin.html", status=status)
|
def whitelistImport(force=None, path=None):
path = request.form.get("file")
force = request.form.get("force")
if matchFilePath(path):
if os.path.isfile(path):
count = countWhitelist()
if (count == 0) | (not count) | (force == "f"):
dropWhitelist()
importWhitelist(path)
status = ["wl_imported", "success"]
else:
status = ["wl_already_filled", "info"]
else:
status = ["invalid_path", "error"]
else:
status = ["invalid_path_format", "error"]
return render_template("admin.html", status=status)
|
https://github.com/cve-search/cve-search/issues/34
|
Starting vendor
Traceback (most recent call last): ] 0/1440
File ".../cve-search/db_mgmt_vendorstatements.py", line 83, in <module>
bulk.find({'id': statement['id']}).upsert().update({'id': statement['id']}, {"$set":{'statement': statement['statement'], 'id': statement['id'], 'organization': statement['organization'], 'contributor': statement['contributor'], 'lastmodified': statement['lastmodified']}})
TypeError: update() takes 2 positional arguments but 3 were given
|
TypeError
|
def whitelistExport(force=None, path=None):
file = request.files["file"]
filename = secure_filename(file.filename)
force = request.form.get("force")
if (force == "df") and (os.path.isfile(filename)):
status = ["wl_file_already_exists", "warning"]
else:
if os.path.isfile(filename):
os.remove(filename)
exportWhitelist(filename)
status = ["wl_exported", "success"]
return render_template("admin.html", status=status)
|
def whitelistExport(force=None, path=None):
path = request.form.get("file")
force = request.form.get("force")
if matchFilePath(path):
if (force == "df") and (os.path.isfile(path)):
status = ["wl_file_already_exists", "warning"]
else:
if os.path.isfile(path):
os.remove(path)
exportWhitelist(path)
status = ["wl_exported", "success"]
else:
status = ["invalid_path", "error"]
return render_template("admin.html", status=status)
|
https://github.com/cve-search/cve-search/issues/34
|
Starting vendor
Traceback (most recent call last): ] 0/1440
File ".../cve-search/db_mgmt_vendorstatements.py", line 83, in <module>
bulk.find({'id': statement['id']}).upsert().update({'id': statement['id']}, {"$set":{'statement': statement['statement'], 'id': statement['id'], 'organization': statement['organization'], 'contributor': statement['contributor'], 'lastmodified': statement['lastmodified']}})
TypeError: update() takes 2 positional arguments but 3 were given
|
TypeError
|
def blacklistImport():
file = request.files["file"]
force = request.form.get("force")
count = countBlacklist()
if (count == 0) | (not count) | (force == "f"):
dropBlacklist()
importBlacklist(TextIOWrapper(file.stream))
status = ["bl_imported", "success"]
else:
status = ["bl_already_filled", "info"]
return render_template("admin.html", status=status)
|
def blacklistImport():
path = request.form.get("file")
force = request.form.get("force")
if matchFilePath(path):
if os.path.isfile(path):
count = countBlacklist()
if (count == 0) | (not count) | (force == "f"):
dropBlacklist()
importBlacklist(path)
status = ["bl_imported", "success"]
else:
status = ["bl_already_filled", "info"]
else:
status = ["invalid_path", "error"]
else:
status = ["invalid_path_format", "error"]
return render_template("admin.html", status=status)
|
https://github.com/cve-search/cve-search/issues/34
|
Starting vendor
Traceback (most recent call last): ] 0/1440
File ".../cve-search/db_mgmt_vendorstatements.py", line 83, in <module>
bulk.find({'id': statement['id']}).upsert().update({'id': statement['id']}, {"$set":{'statement': statement['statement'], 'id': statement['id'], 'organization': statement['organization'], 'contributor': statement['contributor'], 'lastmodified': statement['lastmodified']}})
TypeError: update() takes 2 positional arguments but 3 were given
|
TypeError
|
def blacklistExport():
file = request.files["file"]
filename = secure_filename(file.filename)
force = request.form.get("force")
if (force == "df") and (os.path.isfile(filename)):
status = ["bl_file_already_exists", "warning"]
else:
if os.path.isfile(filename):
os.remove(filename)
exportBlacklist(filename)
status = ["bl_exported", "success"]
return render_template("admin.html", status=status)
|
def blacklistExport():
path = request.form.get("file")
force = request.form.get("force")
if matchFilePath(path):
if (force == "df") and (os.path.isfile(path)):
status = ["bl_file_already_exists", "warning"]
else:
if os.path.isfile(path):
os.remove(path)
exportBlacklist(path)
status = ["bl_exported", "success"]
else:
status = ["invalid_path", "error"]
return render_template("admin.html", status=status)
|
https://github.com/cve-search/cve-search/issues/34
|
Starting vendor
Traceback (most recent call last): ] 0/1440
File ".../cve-search/db_mgmt_vendorstatements.py", line 83, in <module>
bulk.find({'id': statement['id']}).upsert().update({'id': statement['id']}, {"$set":{'statement': statement['statement'], 'id': statement['id'], 'organization': statement['organization'], 'contributor': statement['contributor'], 'lastmodified': statement['lastmodified']}})
TypeError: update() takes 2 positional arguments but 3 were given
|
TypeError
|
def promptNewPass():
password = getpass.getpass("New password:")
verify = getpass.getpass("Verify password:")
if password != verify:
sys.exit(exits["passwordMatch"])
return pbkdf2_sha256.encrypt(password, rounds=rounds, salt_size=saltLength)
|
def promptNewPass():
password = getpass.getpass("New password:")
verify = getpass.getpass("Verify password:")
if password != verify:
sys.exit("The passwords don't match!")
# generate new salt
salt = os.urandom(32)
keyset = {"password": buildPassword(password, salt=salt), "salt": salt}
return keyset
|
https://github.com/cve-search/cve-search/issues/26
|
python3 ./db_mgmt.py -p
Database population started
Year 2002 imported.
Traceback (most recent call last):
File "./db_mgmt.py", line 186, in <module>
ret = collection.insert(ch.cves)
File "/usr/local/lib/python3.4/site-packages/pymongo/collection.py", line 410, in insert
_check_write_command_response(results)
File "/usr/local/lib/python3.4/site-packages/pymongo/helpers.py", line 198, in _check_write_command_response
raise DuplicateKeyError(error.get("errmsg"), 11000, error)
pymongo.errors.DuplicateKeyError: insertDocument :: caused by :: 11000 E11000 duplicate key error index: cvedb.cves.$_id_ dup key: { : ObjectId('54c7b9d737b9ad76355dcd4b') }
|
pymongo.errors.DuplicateKeyError
|
def masterLogin():
master = input("Master account username: ")
if verifyPass(getpass.getpass("Master password:"), master):
if collection.find({"username": master, "master": True}).count() == 0:
sys.exit(exits["noMaster"])
else:
sys.exit("Master user/password combination does not exist")
return True
|
def masterLogin():
master = input("Master account username: ")
masterPass = buildPassword(getpass.getpass("Master password:"), user=master)
if (
collection.find(
{"username": master, "password": masterPass, "master": True}
).count()
== 0
):
sys.exit("Master user/password combination does not exist")
return True
|
https://github.com/cve-search/cve-search/issues/26
|
python3 ./db_mgmt.py -p
Database population started
Year 2002 imported.
Traceback (most recent call last):
File "./db_mgmt.py", line 186, in <module>
ret = collection.insert(ch.cves)
File "/usr/local/lib/python3.4/site-packages/pymongo/collection.py", line 410, in insert
_check_write_command_response(results)
File "/usr/local/lib/python3.4/site-packages/pymongo/helpers.py", line 198, in _check_write_command_response
raise DuplicateKeyError(error.get("errmsg"), 11000, error)
pymongo.errors.DuplicateKeyError: insertDocument :: caused by :: 11000 E11000 duplicate key error index: cvedb.cves.$_id_ dup key: { : ObjectId('54c7b9d737b9ad76355dcd4b') }
|
pymongo.errors.DuplicateKeyError
|
def isLastAdmin(user):
if len(list(collection.find({"username": {"$ne": user}, "master": True}))) == 0:
sys.exit(exits["lastMaster"])
|
def isLastAdmin(user):
if len(list(collection.find({"username": {"$ne": user}, "master": True}))) == 0:
sys.exit(
"This user is the last admin in the database and thus can not be removed"
)
|
https://github.com/cve-search/cve-search/issues/26
|
python3 ./db_mgmt.py -p
Database population started
Year 2002 imported.
Traceback (most recent call last):
File "./db_mgmt.py", line 186, in <module>
ret = collection.insert(ch.cves)
File "/usr/local/lib/python3.4/site-packages/pymongo/collection.py", line 410, in insert
_check_write_command_response(results)
File "/usr/local/lib/python3.4/site-packages/pymongo/helpers.py", line 198, in _check_write_command_response
raise DuplicateKeyError(error.get("errmsg"), 11000, error)
pymongo.errors.DuplicateKeyError: insertDocument :: caused by :: 11000 E11000 duplicate key error index: cvedb.cves.$_id_ dup key: { : ObjectId('54c7b9d737b9ad76355dcd4b') }
|
pymongo.errors.DuplicateKeyError
|
def login_check():
users = db.mgmt_users
# validate username and password
username = request.form.get("username")
password = request.form.get("password")
person = User.get(username)
try:
if person and pbkdf2_sha256.verify(password, person.password):
login_user(person)
return render_template("admin.html", status=["logged_in", "success"])
else:
return render_template(
"login.html", status=["wrong_combination", "warning"]
)
except:
return render_template("login.html", status=["outdated_database", "error"])
|
def login_check():
users = db.mgmt_users
# validate username and password
username = request.form.get("username")
# get salt
if users.find({"username": username}).count() > 0:
if users.find({"username": username, "salt": {"$exists": True}}).count() > 0:
salt = (list(users.find({"username": username}))[0])["salt"]
else:
return render_template("login.html", status=["outdated_database", "error"])
else:
salt = bytearray()
password = hashlib.sha256(
salt + bytes(request.form.get("password"), "utf-8")
).hexdigest()
person = User.get(username)
if person and person.password == password:
login_user(person)
return render_template("admin.html", status=["logged_in", "success"])
else:
return render_template("login.html", status=["wrong_combination", "warning"])
|
https://github.com/cve-search/cve-search/issues/26
|
python3 ./db_mgmt.py -p
Database population started
Year 2002 imported.
Traceback (most recent call last):
File "./db_mgmt.py", line 186, in <module>
ret = collection.insert(ch.cves)
File "/usr/local/lib/python3.4/site-packages/pymongo/collection.py", line 410, in insert
_check_write_command_response(results)
File "/usr/local/lib/python3.4/site-packages/pymongo/helpers.py", line 198, in _check_write_command_response
raise DuplicateKeyError(error.get("errmsg"), 11000, error)
pymongo.errors.DuplicateKeyError: insertDocument :: caused by :: 11000 E11000 duplicate key error index: cvedb.cves.$_id_ dup key: { : ObjectId('54c7b9d737b9ad76355dcd4b') }
|
pymongo.errors.DuplicateKeyError
|
def main():
print("%s (server) #v%s\n" % (NAME, VERSION))
parser = optparse.OptionParser(version=VERSION)
parser.add_option(
"-c",
dest="config_file",
default=CONFIG_FILE,
help="Configuration file (default: '%s')" % os.path.split(CONFIG_FILE)[-1],
)
options, _ = parser.parse_args()
read_config(options.config_file)
if config.USE_SSL:
try:
import OpenSSL
except ImportError:
if subprocess.mswindows:
exit("[!] please install 'pyopenssl' (e.g. 'pip install pyopenssl')")
else:
msg, _ = (
"[!] please install 'pyopenssl'",
platform.linux_distribution()[0].lower(),
)
for distro, install in {
("fedora", "centos"): "sudo yum install pyOpenSSL",
("debian", "ubuntu"): "sudo apt-get install python-openssl",
}.items():
if _ in distro:
msg += " (e.g. '%s')" % install
break
exit(msg)
if not config.SSL_PEM or not os.path.isfile(config.SSL_PEM):
hint = (
"openssl req -new -x509 -keyout %s -out %s -days 365 -nodes -subj '/O=%s CA/C=EU'"
% (config.SSL_PEM or "server.pem", config.SSL_PEM or "server.pem", NAME)
)
exit(
"[!] invalid configuration value for 'SSL_PEM' ('%s')\n[?] (hint: \"%s\")"
% (config.SSL_PEM, hint)
)
def update_timer():
if config.USE_SERVER_UPDATE_TRAILS:
update_trails()
update_ipcat()
thread = threading.Timer(config.UPDATE_PERIOD, update_timer)
thread.daemon = True
thread.start()
if config.UDP_ADDRESS and config.UDP_PORT:
if check_sudo() is False:
exit(
"[!] please run '%s' with sudo/Administrator privileges when using 'UDP_ADDRESS' configuration value"
% __file__
)
start_logd(address=config.UDP_ADDRESS, port=config.UDP_PORT, join=False)
try:
update_timer()
start_httpd(
address=config.HTTP_ADDRESS,
port=config.HTTP_PORT,
pem=config.SSL_PEM if config.USE_SSL else None,
join=True,
)
except KeyboardInterrupt:
print("\r[x] stopping (Ctrl-C pressed)")
|
def main():
print("%s (server) #v%s\n" % (NAME, VERSION))
parser = optparse.OptionParser(version=VERSION)
parser.add_option(
"-c",
dest="config_file",
default=CONFIG_FILE,
help="Configuration file (default: '%s')" % os.path.split(CONFIG_FILE)[-1],
)
options, _ = parser.parse_args()
read_config(options.config_file)
if config.USE_SSL:
try:
import OpenSSL
except ImportError:
if subprocess.mswindows:
exit("[!] please install 'pyopenssl' (e.g. 'pip install pyopenssl')")
else:
msg, _ = (
"[!] please install 'pyopenssl'",
platform.linux_distribution()[0].lower(),
)
for distro, install in {
("fedora", "centos"): "sudo yum install pyOpenSSL",
("debian", "ubuntu"): "sudo apt-get install python-openssl",
}.items():
if _ in distro:
msg += " (e.g. '%s')" % install
break
exit(msg)
if not config.SSL_PEM or not os.path.isfile(config.SSL_PEM):
hint = (
"openssl req -new -x509 -keyout %s -out %s -days 365 -nodes -subj '/O=%s CA/C=EU'"
% (config.SSL_PEM or "server.pem", config.SSL_PEM or "server.pem", NAME)
)
exit(
"[!] invalid configuration value for 'SSL_PEM' ('%s')\n[?] (hint: \"%s\")"
% (config.SSL_PEM, hint)
)
def update_timer():
if config.USE_SERVER_UPDATE_TRAILS:
update_trails()
update_ipcat()
thread = threading.Timer(config.UPDATE_PERIOD, update_timer)
thread.daemon = True
thread.start()
if config.UDP_ADDRESS and config.UDP_PORT:
start_logd(address=config.UDP_ADDRESS, port=config.UDP_PORT, join=False)
try:
update_timer()
start_httpd(
address=config.HTTP_ADDRESS,
port=config.HTTP_PORT,
pem=config.SSL_PEM if config.USE_SSL else None,
join=True,
)
except KeyboardInterrupt:
print("\r[x] stopping (Ctrl-C pressed)")
|
https://github.com/stamparm/maltrail/issues/42
|
[i] starting HTTP server at 'http://0.0.0.0:8338/'
[o] running...
Traceback (most recent call last):
File "/home/admin/maltrail/core/httpd.py", line 514, in _counts
current = datetime.datetime.strptime(os.path.splitext(os.path.basename(filename))[0], DATE_FORMAT)
File "/usr/lib/python2.7/_strptime.py", line 325, in _strptime
(data_string, format))
ValueError: time data 'error' does not match format '%Y-%m-%d'
|
ValueError
|
def fetch():
retval = {}
content = retrieve_content(__url__)
if __check__ in content:
for match in re.finditer(r"(\d+\.\d+\.\d+\.\d+)/(\d+)", content):
prefix, mask = match.groups()
mask = int(mask)
if MIN_BLACKLIST_MASK <= mask <= MAX_BLACKLIST_MASK:
start_int = addr_to_int(prefix) & make_mask(mask)
end_int = start_int | ((1 << 32 - mask) - 1)
for address in xrange(start_int, end_int + 1):
retval[int_to_addr(address)] = (__info__, __reference__)
return retval
|
def fetch():
retval = {}
content = retrieve_content(__url__)
if __check__ in content:
for match in re.finditer(r"([\d.]+)/(\d+)", content):
prefix, mask = match.groups()
start_int = addr_to_int(prefix) & make_mask(int(mask))
end_int = start_int | ((1 << 32 - int(mask)) - 1)
for address in xrange(start_int, end_int + 1):
retval[int_to_addr(address)] = (__info__, __reference__)
return retval
|
https://github.com/stamparm/maltrail/issues/3
|
[i] updating trails...
[o] 'https://feodotracker.abuse.ch/blocklist/?download=domainblocklist'
[!] something went wrong during remote data retrieval ('https://feodotracker.abuse.ch/blocklist/?download=domainblocklist')
[o] 'https://www.badips.com/get/list/any/2?age=30d'
[o] 'https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/ri_web_proxies_30d.ipset'
[o] 'https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/proxylists_30d.ipset'
[o] 'http://www.botscout.com/last_caught_cache.htm'
[o] 'http://vxvault.siri-urz.net/URL_List.php'
[o] 'http://malwaredomains.lehigh.edu/files/domains.txt'
[o] 'https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/socks_proxy_30d.ipset'
[o] 'https://zeustracker.abuse.ch/blocklist.php?download=badips'
[o] 'https://openphish.com/feed.txt'
[o] 'https://myip.ms/files/blacklist/htaccess/latest_blacklist.txt'
[o] 'https://www.maxmind.com/en/proxy-detection-sample-list'
[o] 'https://zeustracker.abuse.ch/blocklist.php?download=domainblocklist'
[o] 'http://malwareurls.joxeankoret.com/normal.txt'
[o] 'http://talosintel.com/feeds/ip-filter.blf'
[o] 'https://check.torproject.org/cgi-bin/TorBulkExitList.py?ip=1.1.1.1'
[o] 'https://rules.emergingthreats.net/open/suricata/rules/emerging-dns.rules'
[o] 'http://www.nothink.org/blacklist/blacklist_malware_irc.txt'
[o] 'http://danger.rulez.sk/projects/bruteforceblocker/blist.php'
[o] 'http://malwared.malwaremustdie.org/rss.php'
[o] 'http://www.voipbl.org/update/'
[!] Unhandled exception occurred ('Python int too large to convert to C long')
[x] Please report the following details at 'https://github.com/stamparm/maltrail/issues':
---
'Traceback (most recent call last):
File "sensor.py", line 389, in <module>
main()
File "sensor.py", line 382, in main
init()
File "sensor.py", line 287, in init
update_timer()
File "sensor.py", line 275, in update_timer
_ = update(server=config.UPDATE_SERVER)
File "/opt/maltrail/maltrail/core/update.py", line 82, in update
results = function()
File "/opt/maltrail/maltrail/trails/feeds/voipbl.py", line 29, in fetch
for address in xrange(start_int, end_int + 1):
OverflowError: Python int too large to convert to C long
'
---
|
OverflowError
|
def get_announcements(self, courses, **kwargs):
"""
List announcements.
:calls: `GET /api/v1/announcements \
<https://canvas.instructure.com/doc/api/announcements.html#method.announcements_api.index>`_
:param courses: Course ID(s) or <Course> objects to request announcements from.
:type courses: list
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.discussion_topic.DiscussionTopic`
"""
from canvasapi.discussion_topic import DiscussionTopic
if type(courses) is not list or len(courses) == 0:
raise RequiredFieldMissing("Course IDs need to be passed as a list")
# The type of object in `courses` is taken care of by obj_or_id, extracting the couse
# ID from a <Course> object or by returning plain strings.
course_ids = [obj_or_id(course_id, "course_id", (Course,)) for course_id in courses]
# Set the **kwargs object vaue so it can be combined with others passed by the user.
kwargs["context_codes"] = [f"course_{course_id}" for course_id in course_ids]
return PaginatedList(
DiscussionTopic,
self.__requester,
"GET",
"announcements",
context_codes=course_ids,
_kwargs=combine_kwargs(**kwargs),
)
|
def get_announcements(self, **kwargs):
"""
List announcements.
:calls: `GET /api/v1/announcements \
<https://canvas.instructure.com/doc/api/announcements.html#method.announcements_api.index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.discussion_topic.DiscussionTopic`
"""
from canvasapi.discussion_topic import DiscussionTopic
return PaginatedList(
DiscussionTopic,
self.__requester,
"GET",
"announcements",
_kwargs=combine_kwargs(**kwargs),
)
|
https://github.com/ucfopen/canvasapi/issues/420
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-15-667b0a4b45f2> in <module>
2 announcements = canvas.get_announcements(context_codes=['course_{}'.format(course_id)])
3 a = announcements[0]
----> 4 a.update()
~/opt/anaconda3/lib/python3.7/site-packages/canvasapi/discussion_topic.py in update(self, **kwargs)
309 "PUT",
310 "{}s/{}/discussion_topics/{}".format(
--> 311 self._parent_type, self._parent_id, self.id
312 ),
313 _kwargs=combine_kwargs(**kwargs),
~/opt/anaconda3/lib/python3.7/site-packages/canvasapi/canvas_object.py in __getattribute__(self, name)
15
16 def __getattribute__(self, name):
---> 17 return super(CanvasObject, self).__getattribute__(name)
18
19 def __init__(self, requester, attributes):
~/opt/anaconda3/lib/python3.7/site-packages/canvasapi/discussion_topic.py in _parent_type(self)
34 return "group"
35 else:
---> 36 raise ValueError("Discussion Topic does not have a course_id or group_id")
37
38 def delete(self, **kwargs):
ValueError: Discussion Topic does not have a course_id or group_id
|
ValueError
|
def _parent_id(self):
"""
Return the id of the course or group that spawned this discussion topic.
:rtype: int
"""
if hasattr(self, "course_id"):
return self.course_id
elif hasattr(self, "group_id"):
return self.group_id
elif hasattr(self, "context_code"):
if self.context_code.startswith("course_"):
self.course_id = self.context_code.split("_")[1]
return self.course_id
elif self.context_code.startswith("group_"):
self.group_id = self.context_code.split("_")[1]
return self.group_id
else:
raise ValueError("Discussion Topic does not have a course_id or group_id")
|
def _parent_id(self):
"""
Return the id of the course or group that spawned this discussion topic.
:rtype: int
"""
if hasattr(self, "course_id"):
return self.course_id
elif hasattr(self, "group_id"):
return self.group_id
else:
raise ValueError("Discussion Topic does not have a course_id or group_id")
|
https://github.com/ucfopen/canvasapi/issues/420
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-15-667b0a4b45f2> in <module>
2 announcements = canvas.get_announcements(context_codes=['course_{}'.format(course_id)])
3 a = announcements[0]
----> 4 a.update()
~/opt/anaconda3/lib/python3.7/site-packages/canvasapi/discussion_topic.py in update(self, **kwargs)
309 "PUT",
310 "{}s/{}/discussion_topics/{}".format(
--> 311 self._parent_type, self._parent_id, self.id
312 ),
313 _kwargs=combine_kwargs(**kwargs),
~/opt/anaconda3/lib/python3.7/site-packages/canvasapi/canvas_object.py in __getattribute__(self, name)
15
16 def __getattribute__(self, name):
---> 17 return super(CanvasObject, self).__getattribute__(name)
18
19 def __init__(self, requester, attributes):
~/opt/anaconda3/lib/python3.7/site-packages/canvasapi/discussion_topic.py in _parent_type(self)
34 return "group"
35 else:
---> 36 raise ValueError("Discussion Topic does not have a course_id or group_id")
37
38 def delete(self, **kwargs):
ValueError: Discussion Topic does not have a course_id or group_id
|
ValueError
|
def _parent_type(self):
"""
Return whether the discussion topic was spawned from a course or group.
:rtype: str
"""
if hasattr(self, "course_id"):
return "course"
elif hasattr(self, "group_id"):
return "group"
elif hasattr(self, "context_code"):
if self.context_code.startswith("course"):
return "course"
elif self.context_code.startswith("group"):
return "group"
else:
raise ValueError("Discussion Topic does not have a course_id or group_id")
|
def _parent_type(self):
"""
Return whether the discussion topic was spawned from a course or group.
:rtype: str
"""
if hasattr(self, "course_id"):
return "course"
elif hasattr(self, "group_id"):
return "group"
else:
raise ValueError("Discussion Topic does not have a course_id or group_id")
|
https://github.com/ucfopen/canvasapi/issues/420
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-15-667b0a4b45f2> in <module>
2 announcements = canvas.get_announcements(context_codes=['course_{}'.format(course_id)])
3 a = announcements[0]
----> 4 a.update()
~/opt/anaconda3/lib/python3.7/site-packages/canvasapi/discussion_topic.py in update(self, **kwargs)
309 "PUT",
310 "{}s/{}/discussion_topics/{}".format(
--> 311 self._parent_type, self._parent_id, self.id
312 ),
313 _kwargs=combine_kwargs(**kwargs),
~/opt/anaconda3/lib/python3.7/site-packages/canvasapi/canvas_object.py in __getattribute__(self, name)
15
16 def __getattribute__(self, name):
---> 17 return super(CanvasObject, self).__getattribute__(name)
18
19 def __init__(self, requester, attributes):
~/opt/anaconda3/lib/python3.7/site-packages/canvasapi/discussion_topic.py in _parent_type(self)
34 return "group"
35 else:
---> 36 raise ValueError("Discussion Topic does not have a course_id or group_id")
37
38 def delete(self, **kwargs):
ValueError: Discussion Topic does not have a course_id or group_id
|
ValueError
|
def __init__(self, base_url, access_token):
"""
:param base_url: The base URL of the Canvas instance's API.
:type base_url: str
:param access_token: The API key to authenticate requests with.
:type access_token: str
"""
new_url = get_institution_url(base_url)
if "api/v1" in base_url:
warnings.warn(
"`base_url` no longer requires an API version be specified. "
"Rewriting `base_url` to {}".format(new_url),
DeprecationWarning,
)
if "http://" in base_url:
warnings.warn(
"Canvas may respond unexpectedly when making requests to HTTP "
"URLs. If possible, please use HTTPS.",
UserWarning,
)
if not base_url.strip():
warnings.warn(
"Canvas needs a valid URL, please provide a non-blank `base_url`.",
UserWarning,
)
if "://" not in base_url:
warnings.warn(
"An invalid `base_url` for the Canvas API Instance was used. "
"Please provide a valid HTTP or HTTPS URL if possible.",
UserWarning,
)
# Ensure that the user-supplied access token contains no leading or
# trailing spaces that may cause issues when communicating with
# the API.
access_token = access_token.strip()
base_url = new_url + "/api/v1/"
self.__requester = Requester(base_url, access_token)
|
def __init__(self, base_url, access_token):
"""
:param base_url: The base URL of the Canvas instance's API.
:type base_url: str
:param access_token: The API key to authenticate requests with.
:type access_token: str
"""
new_url = get_institution_url(base_url)
if "api/v1" in base_url:
warnings.warn(
"`base_url` no longer requires an API version be specified. "
"Rewriting `base_url` to {}".format(new_url),
DeprecationWarning,
)
if "http://" in base_url:
warnings.warn(
"Canvas may respond unexpectedly when making requests to HTTP "
"URLs. If possible, please use HTTPS.",
UserWarning,
)
if not base_url.strip():
warnings.warn(
"Canvas needs a valid URL, please provide a non-blank `base_url`.",
UserWarning,
)
if "://" not in base_url:
warnings.warn(
"An invalid `base_url` for the Canvas API Instance was used. "
"Please provide a valid HTTP or HTTPS URL if possible.",
UserWarning,
)
base_url = new_url + "/api/v1/"
self.__requester = Requester(base_url, access_token)
|
https://github.com/ucfopen/canvasapi/issues/301
|
Traceback (most recent call last):
File "_test.py", line 42, in <module>
course = canvas.get_course(COURSE_ID)
File "<YANKED>canvasapi/canvas.py", line 603, in get_course
"GET", uri_str.format(course_id), _kwargs=combine_kwargs(**kwargs)
File "<YANKED>canvasapi/requester.py", line 191, in request
"Headers: {headers}".format(headers=pformat(clean_headers(headers)))
File "<YANKED>canvasapi/util.py", line 248, in clean_headers
_, token = authorization_header.split(" ")
ValueError: too many values to unpack (expected 2)
|
ValueError
|
def clean_headers(headers):
"""
Sanitize a dictionary containing HTTP headers of sensitive values.
:param headers: The headers to sanitize.
:type headers: dict
:returns: A list of headers without sensitive information stripped out.
:rtype: dict
"""
cleaned_headers = headers.copy()
authorization_header = headers.get("Authorization")
if authorization_header:
sanitized = "****" + authorization_header[-4:]
cleaned_headers["Authorization"] = sanitized
return cleaned_headers
|
def clean_headers(headers):
"""
Sanitize a dictionary containing HTTP headers of sensitive values.
:param headers: The headers to sanitize.
:type headers: dict
:returns: A list of headers without sensitive information stripped out.
:rtype: dict
"""
cleaned_headers = headers.copy()
authorization_header = headers.get("Authorization")
if authorization_header:
# Grab the actual token (not the "Bearer" prefix)
_, token = authorization_header.split(" ")
# Trim all but the last four characters
sanitized = "****" + token[-4:]
cleaned_headers["Authorization"] = sanitized
return cleaned_headers
|
https://github.com/ucfopen/canvasapi/issues/301
|
Traceback (most recent call last):
File "_test.py", line 42, in <module>
course = canvas.get_course(COURSE_ID)
File "<YANKED>canvasapi/canvas.py", line 603, in get_course
"GET", uri_str.format(course_id), _kwargs=combine_kwargs(**kwargs)
File "<YANKED>canvasapi/requester.py", line 191, in request
"Headers: {headers}".format(headers=pformat(clean_headers(headers)))
File "<YANKED>canvasapi/util.py", line 248, in clean_headers
_, token = authorization_header.split(" ")
ValueError: too many values to unpack (expected 2)
|
ValueError
|
def create_external_tool(
self, name, privacy_level, consumer_key, shared_secret, **kwargs
):
"""
Create an external tool in the current account.
:calls: `POST /api/v1/accounts/:account_id/external_tools \
<https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.create>`_
:param name: The name of the tool
:type name: str
:param privacy_level: What information to send to the external
tool. Options are "anonymous", "name_only", "public"
:type privacy_level: str
:param consumer_key: The consumer key for the external tool
:type consumer_key: str
:param shared_secret: The shared secret with the external tool
:type shared_secret: str
:rtype: :class:`canvasapi.external_tool.ExternalTool`
"""
from canvasapi.external_tool import ExternalTool
response = self._requester.request(
"POST",
"accounts/{}/external_tools".format(self.id),
name=name,
privacy_level=privacy_level,
consumer_key=consumer_key,
shared_secret=shared_secret,
_kwargs=combine_kwargs(**kwargs),
)
response_json = response.json()
response_json.update({"account_id": self.id})
return ExternalTool(self._requester, response_json)
|
def create_external_tool(
self, name, privacy_level, consumer_key, shared_secret, **kwargs
):
"""
Create an external tool in the current account.
:calls: `POST /api/v1/accounts/:account_id/external_tools \
<https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.create>`_
:param name: The name of the tool
:type name: str
:param privacy_level: What information to send to the external
tool. Options are "anonymous", "name_only", "public"
:type privacy_level: str
:param consumer_key: The consumer key for the external tool
:type consumer_key: str
:param shared_secret: The shared secret with the external tool
:type shared_secret: str
:rtype: :class:`canvasapi.external_tool.ExternalTool`
"""
from canvasapi.external_tool import ExternalTool
response = self._requester.request(
"POST",
"accounts/{}/external_tools".format(self.id),
_kwargs=combine_kwargs(**kwargs),
)
response_json = response.json()
response_json.update({"account_id": self.id})
return ExternalTool(self._requester, response_json)
|
https://github.com/ucfopen/canvasapi/issues/249
|
tool = {
"custom_fields": {
"canvas_membership_roles": "$Canvas.membership.roles"
},
"course_navigation": {
"default": "disabled",
"text": "Tool Name",
"enabled": "true",
"visibility": "admins",
"label": "Course Emailer",
"selection_width": 700,
"selection_height": 800
},
"consumer_key": "override_me",
"shared_secret": "override_me",
"selection_height": 700,
"selection_width": 800,
"privacy_level": "public",
"url": "https://tools.example.net/lti_launch",
"domain": "tools.example.net",
"name": "ToolName",
"description": "This LTI tool does stuff.",
}
# get an account instance...
A=canvas_test.canvas.get_account(account_id)
A.create_external_tool(**tool)
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "/Users/ialtgi01/PycharmProjects/canvas-refresh-fixer/env/lib/python3.5/site-packages/canvasapi/account.py", line 689, in create_external_tool
_kwargs=combine_kwargs(**kwargs)
File "/Users/ialtgi01/PycharmProjects/canvas-refresh-fixer/env/lib/python3.5/site-packages/canvasapi/requester.py", line 100, in request
raise BadRequest(response.text)
canvasapi.exceptions.BadRequest: {"errors":{"name":[{"attribute":"name","type":"blank","message":"blank"}],"consumer_key":[{"attribute":"consumer_key","type":"blank","message":"blank"}],"shared_secret":[{"attribute":"shared_secret","type":"blank","message":"blank"}]}}
|
canvasapi.exceptions.BadRequest
|
def __init__(self, base_url, access_token):
"""
:param base_url: The base URL of the Canvas instance's API.
:type base_url: str
:param access_token: The API key to authenticate requests with.
:type access_token: str
"""
new_url = get_institution_url(base_url)
if "api/v1" in base_url:
warnings.warn(
"`base_url` no longer requires an API version be specified. "
"Rewriting `base_url` to {}".format(new_url),
DeprecationWarning,
)
if "http://" in base_url:
warnings.warn(
"Canvas may respond unexpectedly when making requests to HTTP "
"URLs. If possible, please use HTTPS.",
UserWarning,
)
base_url = new_url + "/api/v1/"
self.__requester = Requester(base_url, access_token)
|
def __init__(self, base_url, access_token):
"""
:param base_url: The base URL of the Canvas instance's API.
:type base_url: str
:param access_token: The API key to authenticate requests with.
:type access_token: str
"""
new_url = get_institution_url(base_url)
if "api/v1" in base_url:
warnings.warn(
"`base_url` no longer requires an API version be specified. "
"Rewriting `base_url` to {}".format(new_url),
DeprecationWarning,
)
base_url = new_url + "/api/v1/"
self.__requester = Requester(base_url, access_token)
|
https://github.com/ucfopen/canvasapi/issues/183
|
a.create_course(course={'name': 'JSA_TST1337_Jesse_McBride'})
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/usr/local/lib/python2.7/site-packages/canvasapi/account.py", line 77, in create_course
return Course(self._requester, response.json())
File "/usr/local/lib/python2.7/site-packages/canvasapi/canvas_object.py", line 28, in __init__
self.set_attributes(attributes)
File "/usr/local/lib/python2.7/site-packages/canvasapi/canvas_object.py", line 68, in set_attributes
for attribute, value in attributes.items():
AttributeError: 'list' object has no attribute 'items'
|
AttributeError
|
def _attach_v2_specs(
task: _container_op.ContainerOp,
component_spec: _structures.ComponentSpec,
arguments: Mapping[str, Any],
) -> None:
"""Attaches v2 specs to a ContainerOp object.
Args:
task: The ContainerOp object to attach IR specs.
component_spec: The component spec object.
arguments: The dictionary of component arguments.
"""
# Attach v2_specs to the ContainerOp object regardless whether the pipeline is
# being compiled to v1 (Argo yaml) or v2 (IR json).
# However, there're different behaviors for the two cases. Namely, resolved
# commands and arguments, error handling, etc.
# Regarding the difference in error handling, v2 has a stricter requirement on
# input type annotation. For instance, an input without any type annotation is
# viewed as an artifact, and if it's paired with InputValuePlaceholder, an
# error will be thrown at compile time. However, we cannot raise such an error
# in v1, as it wouldn't break existing pipelines.
is_compiling_for_v2 = False
for frame in inspect.stack():
if "_create_pipeline_v2" in frame:
is_compiling_for_v2 = True
break
def _resolve_commands_and_args_v2(
component_spec: _structures.ComponentSpec,
arguments: Mapping[str, Any],
) -> _components._ResolvedCommandLineAndPaths:
"""Resolves the command line argument placeholders for v2 (IR).
Args:
component_spec: The component spec object.
arguments: The dictionary of component arguments.
Returns:
A named tuple: _components._ResolvedCommandLineAndPaths.
"""
inputs_dict = {
input_spec.name: input_spec for input_spec in component_spec.inputs or []
}
outputs_dict = {
output_spec.name: output_spec
for output_spec in component_spec.outputs or []
}
def _input_artifact_uri_placeholder(input_key: str) -> str:
if is_compiling_for_v2 and type_utils.is_parameter_type(
inputs_dict[input_key].type
):
raise TypeError(
'Input "{}" with type "{}" cannot be paired with '
"InputUriPlaceholder.".format(
input_key, inputs_dict[input_key].type
)
)
else:
return "{{{{$.inputs.artifacts['{}'].uri}}}}".format(input_key)
def _input_artifact_path_placeholder(input_key: str) -> str:
if is_compiling_for_v2 and type_utils.is_parameter_type(
inputs_dict[input_key].type
):
raise TypeError(
'Input "{}" with type "{}" cannot be paired with '
"InputPathPlaceholder.".format(
input_key, inputs_dict[input_key].type
)
)
elif is_compiling_for_v2 and input_key in importer_specs:
raise TypeError(
'Input "{}" with type "{}" is not connected to any upstream output. '
"However it is used with InputPathPlaceholder. "
"If you want to import an existing artifact using a system-connected"
" importer node, use InputUriPlaceholder instead. "
"Or if you just want to pass a string parameter, use string type and"
" InputValuePlaceholder instead.".format(
input_key, inputs_dict[input_key].type
)
)
else:
return "{{{{$.inputs.artifacts['{}'].path}}}}".format(input_key)
def _input_parameter_placeholder(input_key: str) -> str:
if is_compiling_for_v2 and not type_utils.is_parameter_type(
inputs_dict[input_key].type
):
raise TypeError(
'Input "{}" with type "{}" cannot be paired with '
"InputValuePlaceholder.".format(
input_key, inputs_dict[input_key].type
)
)
else:
return "{{{{$.inputs.parameters['{}']}}}}".format(input_key)
def _output_artifact_uri_placeholder(output_key: str) -> str:
if is_compiling_for_v2 and type_utils.is_parameter_type(
outputs_dict[output_key].type
):
raise TypeError(
'Output "{}" with type "{}" cannot be paired with '
"OutputUriPlaceholder.".format(
output_key, outputs_dict[output_key].type
)
)
else:
return "{{{{$.outputs.artifacts['{}'].uri}}}}".format(output_key)
def _output_artifact_path_placeholder(output_key: str) -> str:
return "{{{{$.outputs.artifacts['{}'].path}}}}".format(output_key)
def _output_parameter_path_placeholder(output_key: str) -> str:
return "{{{{$.outputs.parameters['{}'].output_file}}}}".format(output_key)
def _resolve_output_path_placeholder(output_key: str) -> str:
if type_utils.is_parameter_type(outputs_dict[output_key].type):
return _output_parameter_path_placeholder(output_key)
else:
return _output_artifact_path_placeholder(output_key)
resolved_cmd = _components._resolve_command_line_and_paths(
component_spec=component_spec,
arguments=arguments,
input_value_generator=_input_parameter_placeholder,
input_uri_generator=_input_artifact_uri_placeholder,
output_uri_generator=_output_artifact_uri_placeholder,
input_path_generator=_input_artifact_path_placeholder,
output_path_generator=_resolve_output_path_placeholder,
)
return resolved_cmd
pipeline_task_spec = pipeline_spec_pb2.PipelineTaskSpec()
# Keep track of auto-injected importer spec.
importer_specs = {}
# Check types of the reference arguments and serialize PipelineParams
original_arguments = arguments
arguments = arguments.copy()
# Preserver input params for ContainerOp.inputs
input_params = list(
set(
[
param
for param in arguments.values()
if isinstance(param, _pipeline_param.PipelineParam)
]
)
)
for input_name, argument_value in arguments.items():
if isinstance(argument_value, _pipeline_param.PipelineParam):
input_type = component_spec._inputs_dict[input_name].type
reference_type = argument_value.param_type
types.verify_type_compatibility(
reference_type,
input_type,
'Incompatible argument passed to the input "{}" of component "{}": '.format(
input_name, component_spec.name
),
)
arguments[input_name] = str(argument_value)
if type_utils.is_parameter_type(input_type):
if argument_value.op_name:
pipeline_task_spec.inputs.parameters[
input_name
].task_output_parameter.producer_task = (
dsl_utils.sanitize_task_name(argument_value.op_name)
)
pipeline_task_spec.inputs.parameters[
input_name
].task_output_parameter.output_parameter_key = argument_value.name
else:
pipeline_task_spec.inputs.parameters[
input_name
].component_input_parameter = argument_value.name
else:
if argument_value.op_name:
pipeline_task_spec.inputs.artifacts[
input_name
].task_output_artifact.producer_task = dsl_utils.sanitize_task_name(
argument_value.op_name
)
pipeline_task_spec.inputs.artifacts[
input_name
].task_output_artifact.output_artifact_key = argument_value.name
elif is_compiling_for_v2:
# argument_value.op_name could be none, in which case an importer node
# will be inserted later.
# Importer node is only applicable for v2 engine.
pipeline_task_spec.inputs.artifacts[
input_name
].task_output_artifact.producer_task = ""
type_schema = type_utils.get_input_artifact_type_schema(
input_name, component_spec.inputs
)
importer_specs[input_name] = importer_node.build_importer_spec(
input_type_schema=type_schema,
pipeline_param_name=argument_value.name,
)
elif isinstance(argument_value, str):
pipeline_params = _pipeline_param.extract_pipelineparams_from_any(
argument_value
)
if pipeline_params and is_compiling_for_v2:
# argument_value contains PipelineParam placeholders.
raise NotImplementedError(
"Currently, a component input can only accept either a constant "
"value or a reference to another pipeline parameter. It cannot be a "
"combination of both. Got: {} for input {}".format(
argument_value, input_name
)
)
input_type = component_spec._inputs_dict[input_name].type
if type_utils.is_parameter_type(input_type):
pipeline_task_spec.inputs.parameters[
input_name
].runtime_value.constant_value.string_value = argument_value
elif is_compiling_for_v2:
# An importer node with constant value artifact_uri will be inserted.
# Importer node is only applicable for v2 engine.
pipeline_task_spec.inputs.artifacts[
input_name
].task_output_artifact.producer_task = ""
type_schema = type_utils.get_input_artifact_type_schema(
input_name, component_spec.inputs
)
importer_specs[input_name] = importer_node.build_importer_spec(
input_type_schema=type_schema, constant_value=argument_value
)
elif isinstance(argument_value, int):
pipeline_task_spec.inputs.parameters[
input_name
].runtime_value.constant_value.int_value = argument_value
elif isinstance(argument_value, float):
pipeline_task_spec.inputs.parameters[
input_name
].runtime_value.constant_value.double_value = argument_value
elif isinstance(argument_value, _container_op.ContainerOp):
raise TypeError(
"ContainerOp object {} was passed to component as an input argument. "
"Pass a single output instead.".format(input_name)
)
else:
if is_compiling_for_v2:
raise NotImplementedError(
"Input argument supports only the following types: PipelineParam"
', str, int, float. Got: "{}".'.format(argument_value)
)
if not component_spec.name:
component_spec.name = _components._default_component_name
# task.name is unique at this point.
pipeline_task_spec.task_info.name = dsl_utils.sanitize_task_name(task.name)
pipeline_task_spec.component_ref.name = dsl_utils.sanitize_component_name(
component_spec.name
)
task.task_spec = pipeline_task_spec
task.importer_specs = importer_specs
task.component_spec = dsl_component_spec.build_component_spec_from_structure(
component_spec
)
resolved_cmd = _resolve_commands_and_args_v2(
component_spec=component_spec, arguments=original_arguments
)
task.container_spec = (
pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec(
image=component_spec.implementation.container.image,
command=resolved_cmd.command,
args=resolved_cmd.args,
)
)
# Override command and arguments if compiling to v2.
if is_compiling_for_v2:
task.command = resolved_cmd.command
task.arguments = resolved_cmd.args
# limit this to v2 compiling only to avoid possible behavior change in v1.
task.inputs = input_params
|
def _attach_v2_specs(
task: _container_op.ContainerOp,
component_spec: _structures.ComponentSpec,
arguments: Mapping[str, Any],
) -> None:
"""Attaches v2 specs to a ContainerOp object.
Args:
task: The ContainerOp object to attach IR specs.
component_spec: The component spec object.
arguments: The dictionary of component arguments.
"""
# Attach v2_specs to the ContainerOp object regardless whether the pipeline is
# being compiled to v1 (Argo yaml) or v2 (IR json).
# However, there're different behaviors for the two cases. Namely, resolved
# commands and arguments, error handling, etc.
# Regarding the difference in error handling, v2 has a stricter requirement on
# input type annotation. For instance, an input without any type annotation is
# viewed as an artifact, and if it's paired with InputValuePlaceholder, an
# error will be thrown at compile time. However, we cannot raise such an error
# in v1, as it wouldn't break existing pipelines.
is_compiling_for_v2 = False
for frame in inspect.stack():
if "_create_pipeline_v2" in frame:
is_compiling_for_v2 = True
break
def _resolve_commands_and_args_v2(
component_spec: _structures.ComponentSpec,
arguments: Mapping[str, Any],
) -> _components._ResolvedCommandLineAndPaths:
"""Resolves the command line argument placeholders for v2 (IR).
Args:
component_spec: The component spec object.
arguments: The dictionary of component arguments.
Returns:
A named tuple: _components._ResolvedCommandLineAndPaths.
"""
inputs_dict = {
input_spec.name: input_spec for input_spec in component_spec.inputs or []
}
outputs_dict = {
output_spec.name: output_spec
for output_spec in component_spec.outputs or []
}
def _input_artifact_uri_placeholder(input_key: str) -> str:
if is_compiling_for_v2 and type_utils.is_parameter_type(
inputs_dict[input_key].type
):
raise TypeError(
'Input "{}" with type "{}" cannot be paired with '
"InputUriPlaceholder.".format(
input_key, inputs_dict[input_key].type
)
)
else:
return "{{{{$.inputs.artifacts['{}'].uri}}}}".format(input_key)
def _input_artifact_path_placeholder(input_key: str) -> str:
if is_compiling_for_v2 and type_utils.is_parameter_type(
inputs_dict[input_key].type
):
raise TypeError(
'Input "{}" with type "{}" cannot be paired with '
"InputPathPlaceholder.".format(
input_key, inputs_dict[input_key].type
)
)
elif is_compiling_for_v2 and input_key in importer_specs:
raise TypeError(
'Input "{}" with type "{}" is not connected to any upstream output. '
"However it is used with InputPathPlaceholder. "
"If you want to import an existing artifact using a system-connected"
" importer node, use InputUriPlaceholder instead. "
"Or if you just want to pass a string parameter, use string type and"
" InputValuePlaceholder instead.".format(
input_key, inputs_dict[input_key].type
)
)
else:
return "{{{{$.inputs.artifacts['{}'].path}}}}".format(input_key)
def _input_parameter_placeholder(input_key: str) -> str:
if is_compiling_for_v2 and not type_utils.is_parameter_type(
inputs_dict[input_key].type
):
raise TypeError(
'Input "{}" with type "{}" cannot be paired with '
"InputValuePlaceholder.".format(
input_key, inputs_dict[input_key].type
)
)
else:
return "{{{{$.inputs.parameters['{}']}}}}".format(input_key)
def _output_artifact_uri_placeholder(output_key: str) -> str:
if is_compiling_for_v2 and type_utils.is_parameter_type(
outputs_dict[output_key].type
):
raise TypeError(
'Output "{}" with type "{}" cannot be paired with '
"OutputUriPlaceholder.".format(
output_key, outputs_dict[output_key].type
)
)
else:
return "{{{{$.outputs.artifacts['{}'].uri}}}}".format(output_key)
def _output_artifact_path_placeholder(output_key: str) -> str:
return "{{{{$.outputs.artifacts['{}'].path}}}}".format(output_key)
def _output_parameter_path_placeholder(output_key: str) -> str:
return "{{{{$.outputs.parameters['{}'].output_file}}}}".format(output_key)
def _resolve_output_path_placeholder(output_key: str) -> str:
if type_utils.is_parameter_type(outputs_dict[output_key].type):
return _output_parameter_path_placeholder(output_key)
else:
return _output_artifact_path_placeholder(output_key)
resolved_cmd = _components._resolve_command_line_and_paths(
component_spec=component_spec,
arguments=arguments,
input_value_generator=_input_parameter_placeholder,
input_uri_generator=_input_artifact_uri_placeholder,
output_uri_generator=_output_artifact_uri_placeholder,
input_path_generator=_input_artifact_path_placeholder,
output_path_generator=_resolve_output_path_placeholder,
)
return resolved_cmd
pipeline_task_spec = pipeline_spec_pb2.PipelineTaskSpec()
# Keep track of auto-injected importer spec.
importer_specs = {}
# Check types of the reference arguments and serialize PipelineParams
original_arguments = arguments
arguments = arguments.copy()
# Preserver input params for ContainerOp.inputs
input_params = list(
set(
[
param
for param in arguments.values()
if isinstance(param, _pipeline_param.PipelineParam)
]
)
)
for input_name, argument_value in arguments.items():
if isinstance(argument_value, _pipeline_param.PipelineParam):
input_type = component_spec._inputs_dict[input_name].type
reference_type = argument_value.param_type
types.verify_type_compatibility(
reference_type,
input_type,
'Incompatible argument passed to the input "{}" of component "{}": '.format(
input_name, component_spec.name
),
)
arguments[input_name] = str(argument_value)
if type_utils.is_parameter_type(input_type):
if argument_value.op_name:
pipeline_task_spec.inputs.parameters[
input_name
].task_output_parameter.producer_task = (
dsl_utils.sanitize_task_name(argument_value.op_name)
)
pipeline_task_spec.inputs.parameters[
input_name
].task_output_parameter.output_parameter_key = argument_value.name
else:
pipeline_task_spec.inputs.parameters[
input_name
].component_input_parameter = argument_value.name
else:
if argument_value.op_name:
pipeline_task_spec.inputs.artifacts[
input_name
].task_output_artifact.producer_task = dsl_utils.sanitize_task_name(
argument_value.op_name
)
pipeline_task_spec.inputs.artifacts[
input_name
].task_output_artifact.output_artifact_key = argument_value.name
else:
# argument_value.op_name could be none, in which case an importer node
# will be inserted later.
pipeline_task_spec.inputs.artifacts[
input_name
].task_output_artifact.producer_task = ""
type_schema = type_utils.get_input_artifact_type_schema(
input_name, component_spec.inputs
)
importer_specs[input_name] = importer_node.build_importer_spec(
input_type_schema=type_schema,
pipeline_param_name=argument_value.name,
)
elif isinstance(argument_value, str):
pipeline_params = _pipeline_param.extract_pipelineparams_from_any(
argument_value
)
if pipeline_params and is_compiling_for_v2:
# argument_value contains PipelineParam placeholders.
raise NotImplementedError(
"Currently, a component input can only accept either a constant "
"value or a reference to another pipeline parameter. It cannot be a "
"combination of both. Got: {} for input {}".format(
argument_value, input_name
)
)
input_type = component_spec._inputs_dict[input_name].type
if type_utils.is_parameter_type(input_type):
pipeline_task_spec.inputs.parameters[
input_name
].runtime_value.constant_value.string_value = argument_value
else:
# An importer node with constant value artifact_uri will be inserted.
pipeline_task_spec.inputs.artifacts[
input_name
].task_output_artifact.producer_task = ""
type_schema = type_utils.get_input_artifact_type_schema(
input_name, component_spec.inputs
)
importer_specs[input_name] = importer_node.build_importer_spec(
input_type_schema=type_schema, constant_value=argument_value
)
elif isinstance(argument_value, int):
pipeline_task_spec.inputs.parameters[
input_name
].runtime_value.constant_value.int_value = argument_value
elif isinstance(argument_value, float):
pipeline_task_spec.inputs.parameters[
input_name
].runtime_value.constant_value.double_value = argument_value
elif isinstance(argument_value, _container_op.ContainerOp):
raise TypeError(
"ContainerOp object {} was passed to component as an input argument. "
"Pass a single output instead.".format(input_name)
)
else:
if is_compiling_for_v2:
raise NotImplementedError(
"Input argument supports only the following types: PipelineParam"
', str, int, float. Got: "{}".'.format(argument_value)
)
if not component_spec.name:
component_spec.name = _components._default_component_name
# task.name is unique at this point.
pipeline_task_spec.task_info.name = dsl_utils.sanitize_task_name(task.name)
pipeline_task_spec.component_ref.name = dsl_utils.sanitize_component_name(
component_spec.name
)
task.task_spec = pipeline_task_spec
task.importer_specs = importer_specs
task.component_spec = dsl_component_spec.build_component_spec_from_structure(
component_spec
)
resolved_cmd = _resolve_commands_and_args_v2(
component_spec=component_spec, arguments=original_arguments
)
task.container_spec = (
pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec(
image=component_spec.implementation.container.image,
command=resolved_cmd.command,
args=resolved_cmd.args,
)
)
# Override command and arguments if compiling to v2.
if is_compiling_for_v2:
task.command = resolved_cmd.command
task.arguments = resolved_cmd.args
# limit this to v2 compiling only to avoid possible behavior change in v1.
task.inputs = input_params
|
https://github.com/kubeflow/pipelines/issues/5263
|
�[34msample-test-bg6w4-565595884: �[0;32mhttps://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/bigquery/query/component.yaml�[0m in �[0;36mBigquery - Query�[0;34m(query, project_id, dataset_id, table_id, output_gcs_path, dataset_location, job_config)�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m
�[34msample-test-bg6w4-565595884: �[0;32m/usr/local/lib/python3.7/dist-packages/kfp-1.4.1-py3.7.egg/kfp/components/_components.py�[0m in �[0;36mcreate_task_object_from_component_and_pythonic_arguments�[0;34m(pythonic_arguments)�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 432�[0m �[0mcomponent_spec�[0m�[0;34m=�[0m�[0mcomponent_spec�[0m�[0;34m,�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 433�[0m �[0marguments�[0m�[0;34m=�[0m�[0marguments�[0m�[0;34m,�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0;32m--> 434�[0;31m �[0mcomponent_ref�[0m�[0;34m=�[0m�[0mcomponent_ref�[0m�[0;34m,�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m�[1;32m 435�[0m )�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 436�[0m �[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m
�[34msample-test-bg6w4-565595884: �[0;32m/usr/local/lib/python3.7/dist-packages/kfp-1.4.1-py3.7.egg/kfp/components/_components.py�[0m in �[0;36m_create_task_object_from_component_and_arguments�[0;34m(component_spec, arguments, component_ref, **kwargs)�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 380�[0m �[0marguments�[0m�[0;34m=�[0m�[0marguments�[0m�[0;34m,�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 381�[0m �[0mcomponent_ref�[0m�[0;34m=�[0m�[0mcomponent_ref�[0m�[0;34m,�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0;32m--> 382�[0;31m �[0;34m**�[0m�[0mkwargs�[0m�[0;34m,�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m�[1;32m 383�[0m )�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 384�[0m �[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m
�[34msample-test-bg6w4-565595884: �[0;32m/usr/local/lib/python3.7/dist-packages/kfp-1.4.1-py3.7.egg/kfp/dsl/_component_bridge.py�[0m in �[0;36m_create_container_op_from_component_and_arguments�[0;34m(component_spec, arguments, component_ref)�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 143�[0m �[0mtask�[0m�[0;34m.�[0m�[0mexecution_options�[0m�[0;34m.�[0m�[0mcaching_strategy�[0m�[0;34m.�[0m�[0mmax_cache_staleness�[0m �[0;34m=�[0m �[0;34m'P0D'�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 144�[0m �[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0;32m--> 145�[0;31m �[0m_attach_v2_specs�[0m�[0;34m(�[0m�[0mtask�[0m�[0;34m,�[0m �[0mcomponent_spec�[0m�[0;34m,�[0m �[0moriginal_arguments�[0m�[0;34m)�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m�[1;32m 146�[0m �[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 147�[0m �[0;32mreturn�[0m �[0mtask�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m
�[34msample-test-bg6w4-565595884: �[0;32m/usr/local/lib/python3.7/dist-packages/kfp-1.4.1-py3.7.egg/kfp/dsl/_component_bridge.py�[0m in �[0;36m_attach_v2_specs�[0;34m(task, component_spec, arguments)�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 345�[0m input_name, component_spec.inputs)�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 346�[0m importer_specs[input_name] = importer_node.build_importer_spec(�[0m
�[34msample-test-bg6w4-565595884: �[0;32m--> 347�[0;31m input_type_schema=type_schema, constant_value=argument_value)�[0m
�[34msample-test-bg6w4-565595884: �[0m�[1;32m 348�[0m �[0;32melif�[0m �[0misinstance�[0m�[0;34m(�[0m�[0margument_value�[0m�[0;34m,�[0m �[0mint�[0m�[0;34m)�[0m�[0;34m:�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 349�[0m pipeline_task_spec.inputs.parameters[�[0m
�[34msample-test-bg6w4-565595884: �[0m
�[34msample-test-bg6w4-565595884: �[0;32m/usr/local/lib/python3.7/dist-packages/kfp-1.4.1-py3.7.egg/kfp/dsl/importer_node.py�[0m in �[0;36mbuild_importer_spec�[0;34m(input_type_schema, pipeline_param_name, constant_value)�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 42�[0m """�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 43�[0m assert bool(pipeline_param_name) != bool(constant_value), (�[0m
�[34msample-test-bg6w4-565595884: �[0;32m---> 44�[0;31m �[0;34m'importer spec should be built using either pipeline_param_name or '�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m�[1;32m 45�[0m 'constant_value.')�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 46�[0m �[0mimporter_spec�[0m �[0;34m=�[0m �[0mpipeline_spec_pb2�[0m�[0;34m.�[0m�[0mPipelineDeploymentConfig�[0m�[0;34m.�[0m�[0mImporterSpec�[0m�[0;34m(�[0m�[0;34m)�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m
�[34msample-test-bg6w4-565595884: �[0;31mAssertionError�[0m: importer spec should be built using either pipeline_param_name or constant_value.�[0m
�[34msample-test-bg6w4-565595884: KFP API host is 3edeebea8e9ac0e0-dot-us-east1.pipelines.googleusercontent.com�[0m
�[34msample-test-bg6w4-565595884: Run the sample tests...�[0m
�[34msample-test-bg6w4-565595884: Traceback (most recent call last):�[0m
�[34msample-test-bg6w4-565595884: File "/python/src/github.com/kubeflow/pipelines/test/sample-test/sample_test_launcher.py", line 260, in <module>�[0m
�[34msample-test-bg6w4-565595884: main()�[0m
�[34msample-test-bg6w4-565595884: File "/python/src/github.com/kubeflow/pipelines/test/sample-test/sample_test_launcher.py", line 256, in main�[0m
�[34msample-test-bg6w4-565595884: 'component_test': ComponentTest�[0m
�[34msample-test-bg6w4-565595884: File "/usr/local/lib/python3.7/dist-packages/fire/core.py", line 141, in Fire�[0m
�[34msample-test-bg6w4-565595884: component_trace = _Fire(component, args, parsed_flag_args, context, name)�[0m
�[34msample-test-bg6w4-565595884: File "/usr/local/lib/python3.7/dist-packages/fire/core.py", line 471, in _Fire�[0m
�[34msample-test-bg6w4-565595884: target=component.__name__)�[0m
�[34msample-test-bg6w4-565595884: File "/usr/local/lib/python3.7/dist-packages/fire/core.py", line 681, in _CallAndUpdateTrace�[0m
�[34msample-test-bg6w4-565595884: component = fn(*varargs, **kwargs)�[0m
�[34msample-test-bg6w4-565595884: File "/python/src/github.com/kubeflow/pipelines/test/sample-test/sample_test_launcher.py", line 183, in run_test�[0m
�[34msample-test-bg6w4-565595884: nbchecker.check()�[0m
�[34msample-test-bg6w4-565595884: File "/python/src/github.com/kubeflow/pipelines/test/sample-test/check_notebook_results.py", line 74, in check�[0m
�[34msample-test-bg6w4-565595884: experiment_id = client.get_experiment(experiment_name=experiment).id�[0m
�[34msample-test-bg6w4-565595884: File "/usr/local/lib/python3.7/dist-packages/kfp-1.4.1-py3.7.egg/kfp/_client.py", line 454, in get_experiment�[0m
�[34msample-test-bg6w4-565595884: raise ValueError('No experiment is found with name {}.'.format(experiment_name))�[0m
|
31mAssertionError
|
def build_importer_spec(
input_type_schema: str,
pipeline_param_name: Optional[str] = None,
constant_value: Optional[str] = None,
) -> pipeline_spec_pb2.PipelineDeploymentConfig.ImporterSpec:
"""Builds an importer executor spec.
Args:
input_type_schema: The type of the input artifact.
pipeline_param_name: The name of the pipeline parameter if the importer gets
its artifacts_uri via a pipeline parameter. This argument is mutually
exclusive with constant_value.
constant_value: The value of artifact_uri in case a contant value is passed
directly into the compoent op. This argument is mutually exclusive with
pipeline_param_name.
Returns:
An importer spec.
"""
assert bool(pipeline_param_name is None) != bool(constant_value is None), (
"importer spec should be built using either pipeline_param_name or "
"constant_value."
)
importer_spec = pipeline_spec_pb2.PipelineDeploymentConfig.ImporterSpec()
importer_spec.type_schema.instance_schema = input_type_schema
# TODO: subject to IR change on artifact_uri message type.
if pipeline_param_name:
importer_spec.artifact_uri.runtime_parameter = pipeline_param_name
elif constant_value:
importer_spec.artifact_uri.constant_value.string_value = constant_value
return importer_spec
|
def build_importer_spec(
input_type_schema: str,
pipeline_param_name: Optional[str] = None,
constant_value: Optional[str] = None,
) -> pipeline_spec_pb2.PipelineDeploymentConfig.ImporterSpec:
"""Builds an importer executor spec.
Args:
input_type_schema: The type of the input artifact.
pipeline_param_name: The name of the pipeline parameter if the importer gets
its artifacts_uri via a pipeline parameter. This argument is mutually
exclusive with constant_value.
constant_value: The value of artifact_uri in case a contant value is passed
directly into the compoent op. This argument is mutually exclusive with
pipeline_param_name.
Returns:
An importer spec.
"""
assert bool(pipeline_param_name) != bool(constant_value), (
"importer spec should be built using either pipeline_param_name or "
"constant_value."
)
importer_spec = pipeline_spec_pb2.PipelineDeploymentConfig.ImporterSpec()
importer_spec.type_schema.instance_schema = input_type_schema
# TODO: subject to IR change on artifact_uri message type.
if pipeline_param_name:
importer_spec.artifact_uri.runtime_parameter = pipeline_param_name
elif constant_value:
importer_spec.artifact_uri.constant_value.string_value = constant_value
return importer_spec
|
https://github.com/kubeflow/pipelines/issues/5263
|
�[34msample-test-bg6w4-565595884: �[0;32mhttps://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/bigquery/query/component.yaml�[0m in �[0;36mBigquery - Query�[0;34m(query, project_id, dataset_id, table_id, output_gcs_path, dataset_location, job_config)�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m
�[34msample-test-bg6w4-565595884: �[0;32m/usr/local/lib/python3.7/dist-packages/kfp-1.4.1-py3.7.egg/kfp/components/_components.py�[0m in �[0;36mcreate_task_object_from_component_and_pythonic_arguments�[0;34m(pythonic_arguments)�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 432�[0m �[0mcomponent_spec�[0m�[0;34m=�[0m�[0mcomponent_spec�[0m�[0;34m,�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 433�[0m �[0marguments�[0m�[0;34m=�[0m�[0marguments�[0m�[0;34m,�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0;32m--> 434�[0;31m �[0mcomponent_ref�[0m�[0;34m=�[0m�[0mcomponent_ref�[0m�[0;34m,�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m�[1;32m 435�[0m )�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 436�[0m �[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m
�[34msample-test-bg6w4-565595884: �[0;32m/usr/local/lib/python3.7/dist-packages/kfp-1.4.1-py3.7.egg/kfp/components/_components.py�[0m in �[0;36m_create_task_object_from_component_and_arguments�[0;34m(component_spec, arguments, component_ref, **kwargs)�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 380�[0m �[0marguments�[0m�[0;34m=�[0m�[0marguments�[0m�[0;34m,�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 381�[0m �[0mcomponent_ref�[0m�[0;34m=�[0m�[0mcomponent_ref�[0m�[0;34m,�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0;32m--> 382�[0;31m �[0;34m**�[0m�[0mkwargs�[0m�[0;34m,�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m�[1;32m 383�[0m )�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 384�[0m �[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m
�[34msample-test-bg6w4-565595884: �[0;32m/usr/local/lib/python3.7/dist-packages/kfp-1.4.1-py3.7.egg/kfp/dsl/_component_bridge.py�[0m in �[0;36m_create_container_op_from_component_and_arguments�[0;34m(component_spec, arguments, component_ref)�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 143�[0m �[0mtask�[0m�[0;34m.�[0m�[0mexecution_options�[0m�[0;34m.�[0m�[0mcaching_strategy�[0m�[0;34m.�[0m�[0mmax_cache_staleness�[0m �[0;34m=�[0m �[0;34m'P0D'�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 144�[0m �[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0;32m--> 145�[0;31m �[0m_attach_v2_specs�[0m�[0;34m(�[0m�[0mtask�[0m�[0;34m,�[0m �[0mcomponent_spec�[0m�[0;34m,�[0m �[0moriginal_arguments�[0m�[0;34m)�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m�[1;32m 146�[0m �[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 147�[0m �[0;32mreturn�[0m �[0mtask�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m
�[34msample-test-bg6w4-565595884: �[0;32m/usr/local/lib/python3.7/dist-packages/kfp-1.4.1-py3.7.egg/kfp/dsl/_component_bridge.py�[0m in �[0;36m_attach_v2_specs�[0;34m(task, component_spec, arguments)�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 345�[0m input_name, component_spec.inputs)�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 346�[0m importer_specs[input_name] = importer_node.build_importer_spec(�[0m
�[34msample-test-bg6w4-565595884: �[0;32m--> 347�[0;31m input_type_schema=type_schema, constant_value=argument_value)�[0m
�[34msample-test-bg6w4-565595884: �[0m�[1;32m 348�[0m �[0;32melif�[0m �[0misinstance�[0m�[0;34m(�[0m�[0margument_value�[0m�[0;34m,�[0m �[0mint�[0m�[0;34m)�[0m�[0;34m:�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 349�[0m pipeline_task_spec.inputs.parameters[�[0m
�[34msample-test-bg6w4-565595884: �[0m
�[34msample-test-bg6w4-565595884: �[0;32m/usr/local/lib/python3.7/dist-packages/kfp-1.4.1-py3.7.egg/kfp/dsl/importer_node.py�[0m in �[0;36mbuild_importer_spec�[0;34m(input_type_schema, pipeline_param_name, constant_value)�[0m�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 42�[0m """�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 43�[0m assert bool(pipeline_param_name) != bool(constant_value), (�[0m
�[34msample-test-bg6w4-565595884: �[0;32m---> 44�[0;31m �[0;34m'importer spec should be built using either pipeline_param_name or '�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m�[1;32m 45�[0m 'constant_value.')�[0m
�[34msample-test-bg6w4-565595884: �[1;32m 46�[0m �[0mimporter_spec�[0m �[0;34m=�[0m �[0mpipeline_spec_pb2�[0m�[0;34m.�[0m�[0mPipelineDeploymentConfig�[0m�[0;34m.�[0m�[0mImporterSpec�[0m�[0;34m(�[0m�[0;34m)�[0m�[0;34m�[0m�[0;34m�[0m�[0m�[0m
�[34msample-test-bg6w4-565595884: �[0m
�[34msample-test-bg6w4-565595884: �[0;31mAssertionError�[0m: importer spec should be built using either pipeline_param_name or constant_value.�[0m
�[34msample-test-bg6w4-565595884: KFP API host is 3edeebea8e9ac0e0-dot-us-east1.pipelines.googleusercontent.com�[0m
�[34msample-test-bg6w4-565595884: Run the sample tests...�[0m
�[34msample-test-bg6w4-565595884: Traceback (most recent call last):�[0m
�[34msample-test-bg6w4-565595884: File "/python/src/github.com/kubeflow/pipelines/test/sample-test/sample_test_launcher.py", line 260, in <module>�[0m
�[34msample-test-bg6w4-565595884: main()�[0m
�[34msample-test-bg6w4-565595884: File "/python/src/github.com/kubeflow/pipelines/test/sample-test/sample_test_launcher.py", line 256, in main�[0m
�[34msample-test-bg6w4-565595884: 'component_test': ComponentTest�[0m
�[34msample-test-bg6w4-565595884: File "/usr/local/lib/python3.7/dist-packages/fire/core.py", line 141, in Fire�[0m
�[34msample-test-bg6w4-565595884: component_trace = _Fire(component, args, parsed_flag_args, context, name)�[0m
�[34msample-test-bg6w4-565595884: File "/usr/local/lib/python3.7/dist-packages/fire/core.py", line 471, in _Fire�[0m
�[34msample-test-bg6w4-565595884: target=component.__name__)�[0m
�[34msample-test-bg6w4-565595884: File "/usr/local/lib/python3.7/dist-packages/fire/core.py", line 681, in _CallAndUpdateTrace�[0m
�[34msample-test-bg6w4-565595884: component = fn(*varargs, **kwargs)�[0m
�[34msample-test-bg6w4-565595884: File "/python/src/github.com/kubeflow/pipelines/test/sample-test/sample_test_launcher.py", line 183, in run_test�[0m
�[34msample-test-bg6w4-565595884: nbchecker.check()�[0m
�[34msample-test-bg6w4-565595884: File "/python/src/github.com/kubeflow/pipelines/test/sample-test/check_notebook_results.py", line 74, in check�[0m
�[34msample-test-bg6w4-565595884: experiment_id = client.get_experiment(experiment_name=experiment).id�[0m
�[34msample-test-bg6w4-565595884: File "/usr/local/lib/python3.7/dist-packages/kfp-1.4.1-py3.7.egg/kfp/_client.py", line 454, in get_experiment�[0m
�[34msample-test-bg6w4-565595884: raise ValueError('No experiment is found with name {}.'.format(experiment_name))�[0m
|
31mAssertionError
|
def _capture_function_code_using_source_copy(func) -> str:
func_code = inspect.getsource(func)
# Function might be defined in some indented scope (e.g. in another function).
# We need to handle this and properly dedent the function source code
func_code = textwrap.dedent(func_code)
func_code_lines = func_code.split("\n")
# Removing possible decorators (can be multiline) until the function definition is found
while func_code_lines and not func_code_lines[0].startswith("def "):
del func_code_lines[0]
if not func_code_lines:
raise ValueError(
'Failed to dedent and clean up the source of function "{}". It is probably not properly indented.'.format(
func.__name__
)
)
func_code = "\n".join(func_code_lines)
# Stripping type annotations to prevent import errors.
# The most common cases are InputPath/OutputPath and typing.NamedTuple annotations
func_code = strip_type_hints(func_code)
return func_code
|
def _capture_function_code_using_source_copy(func) -> str:
import textwrap
func_code = inspect.getsource(func)
# Function might be defined in some indented scope (e.g. in another function).
# We need to handle this and properly dedent the function source code
func_code = textwrap.dedent(func_code)
func_code_lines = func_code.split("\n")
# Removing possible decorators (can be multiline) until the function definition is found
while func_code_lines and not func_code_lines[0].startswith("def "):
del func_code_lines[0]
if not func_code_lines:
raise ValueError(
'Failed to dedent and clean up the source of function "{}". It is probably not properly indented.'.format(
func.__name__
)
)
func_code = "\n".join(func_code_lines)
# Stripping type annotations to prevent import errors.
# The most common cases are InputPath/OutputPath and typing.NamedTuple annotations
func_code = strip_type_hints(func_code)
return func_code
|
https://github.com/kubeflow/pipelines/issues/4849
|
Traceback (most recent call last):
File "/opt/conda/lib/python3.8/site-packages/torch/_utils_internal.py", line 49, in get_source_lines_and_file
sourcelines, file_lineno = inspect.getsourcelines(obj)
File "/opt/conda/lib/python3.8/inspect.py", line 967, in getsourcelines
lines, lnum = findsource(object)
File "/opt/conda/lib/python3.8/inspect.py", line 798, in findsource
raise OSError('could not get source code')
OSError: could not get source code
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "<string>", line 55, in <module>
File "<string>", line 44, in create_fully_connected_pytorch_network
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_script.py", line 897, in script
return torch.jit._recursive.create_script_module(
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 352, in create_script_module
return create_script_module_impl(nn_module, concrete_type, stubs_fn)
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 364, in create_script_module_impl
method_stubs = stubs_fn(nn_module)
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 591, in infer_methods_to_compile
stubs.append(make_stub_from_method(nn_module, method))
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 52, in make_stub_from_method
return make_stub(func, method_name)
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 37, in make_stub
ast = get_jit_def(func, name, self_name="RecursiveScriptModule")
File "/opt/conda/lib/python3.8/site-packages/torch/jit/frontend.py", line 197, in get_jit_def
sourcelines, file_lineno, filename = get_source_lines_and_file(fn, torch._C.ErrorReport.call_stack())
File "/opt/conda/lib/python3.8/site-packages/torch/_utils_internal.py", line 56, in get_source_lines_and_file
raise OSError(msg) from e
OSError: Can't get source for <bound method create_fully_connected_pytorch_network.<locals>.FullyConnectedNetwork.forward of FullyConnectedNetwork(
(layers): ModuleList(
(0): Linear(in_features=10, out_features=100, bias=True)
(1): Linear(in_features=100, out_features=10, bias=True)
(2): Linear(in_features=10, out_features=1, bias=True)
)
)>. TorchScript requires source access in order to carry out compilation, make sure original .py files are available.
|
OSError
|
def _func_to_component_spec(
func,
extra_code="",
base_image: str = None,
packages_to_install: List[str] = None,
modules_to_capture: List[str] = None,
use_code_pickling=False,
) -> ComponentSpec:
"""Takes a self-contained python function and converts it to component.
Args:
func: Required. The function to be converted
base_image: Optional. Docker image to be used as a base image for the python component. Must have python 3.5+ installed. Default is python:3.7
Note: The image can also be specified by decorating the function with the @python_component decorator. If different base images are explicitly specified in both places, an error is raised.
extra_code: Optional. Python source code that gets placed before the function code. Can be used as workaround to define types used in function signature.
packages_to_install: Optional. List of [versioned] python packages to pip install before executing the user function.
modules_to_capture: Optional. List of module names that will be captured (instead of just referencing) during the dependency scan. By default the :code:`func.__module__` is captured.
use_code_pickling: Specifies whether the function code should be captured using pickling as opposed to source code manipulation. Pickling has better support for capturing dependencies, but is sensitive to version mismatch between python in component creation environment and runtime image.
Returns:
A :py:class:`kfp.components.structures.ComponentSpec` instance.
"""
decorator_base_image = getattr(func, "_component_base_image", None)
if decorator_base_image is not None:
if base_image is not None and decorator_base_image != base_image:
raise ValueError(
"base_image ({}) conflicts with the decorator-specified base image metadata ({})".format(
base_image, decorator_base_image
)
)
else:
base_image = decorator_base_image
else:
if base_image is None:
base_image = default_base_image_or_builder
if isinstance(base_image, Callable):
base_image = base_image()
packages_to_install = packages_to_install or []
component_spec = _extract_component_interface(func)
component_inputs = component_spec.inputs or []
component_outputs = component_spec.outputs or []
arguments = []
arguments.extend(InputValuePlaceholder(input.name) for input in component_inputs)
arguments.extend(OutputPathPlaceholder(output.name) for output in component_outputs)
if use_code_pickling:
func_code = _capture_function_code_using_cloudpickle(func, modules_to_capture)
# pip startup is quite slow. TODO: Remove the special cloudpickle installation code in favor of the the following line once a way to speed up pip startup is discovered.
# packages_to_install.append('cloudpickle==1.1.1')
else:
func_code = _capture_function_code_using_source_copy(func)
definitions = set()
def get_deserializer_and_register_definitions(type_name):
deserializer_code = get_deserializer_code_for_type_struct(type_name)
if deserializer_code:
(deserializer_code_str, definition_str) = deserializer_code
if definition_str:
definitions.add(definition_str)
return deserializer_code_str
return "str"
pre_func_definitions = set()
def get_argparse_type_for_input_file(passing_style):
if passing_style is None:
return None
if passing_style is InputPath:
return "str"
elif passing_style is InputTextFile:
return "argparse.FileType('rt')"
elif passing_style is InputBinaryFile:
return "argparse.FileType('rb')"
# For Output* we cannot use the build-in argparse.FileType objects since they do not create parent directories.
elif passing_style is OutputPath:
# ~= return 'str'
pre_func_definitions.add(
inspect.getsource(_make_parent_dirs_and_return_path)
)
return _make_parent_dirs_and_return_path.__name__
elif passing_style is OutputTextFile:
# ~= return "argparse.FileType('wt')"
pre_func_definitions.add(
inspect.getsource(_parent_dirs_maker_that_returns_open_file)
)
return _parent_dirs_maker_that_returns_open_file.__name__ + "('wt')"
elif passing_style is OutputBinaryFile:
# ~= return "argparse.FileType('wb')"
pre_func_definitions.add(
inspect.getsource(_parent_dirs_maker_that_returns_open_file)
)
return _parent_dirs_maker_that_returns_open_file.__name__ + "('wb')"
raise NotImplementedError(
'Unexpected data passing style: "{}".'.format(str(passing_style))
)
def get_serializer_and_register_definitions(type_name) -> str:
serializer_func = get_serializer_func_for_type_struct(type_name)
if serializer_func:
# If serializer is not part of the standard python library, then include its code in the generated program
if hasattr(
serializer_func, "__module__"
) and not _module_is_builtin_or_standard(serializer_func.__module__):
import inspect
serializer_code_str = inspect.getsource(serializer_func)
definitions.add(serializer_code_str)
return serializer_func.__name__
return "str"
arg_parse_code_lines = [
"import argparse",
"_parser = argparse.ArgumentParser(prog={prog_repr}, description={description_repr})".format(
prog_repr=repr(component_spec.name or ""),
description_repr=repr(component_spec.description or ""),
),
]
outputs_passed_through_func_return_tuple = [
output for output in component_outputs if output._passing_style is None
]
file_outputs_passed_using_func_parameters = [
output for output in component_outputs if output._passing_style is not None
]
arguments = []
for input in component_inputs + file_outputs_passed_using_func_parameters:
param_flag = "--" + input.name.replace("_", "-")
is_required = isinstance(input, OutputSpec) or not input.optional
line = '_parser.add_argument("{param_flag}", dest="{param_var}", type={param_type}, required={is_required}, default=argparse.SUPPRESS)'.format(
param_flag=param_flag,
param_var=input._parameter_name, # Not input.name, since the inputs could have been renamed
param_type=get_argparse_type_for_input_file(input._passing_style)
or get_deserializer_and_register_definitions(input.type),
is_required=str(is_required),
)
arg_parse_code_lines.append(line)
if input._passing_style in [InputPath, InputTextFile, InputBinaryFile]:
arguments_for_input = [param_flag, InputPathPlaceholder(input.name)]
elif input._passing_style in [OutputPath, OutputTextFile, OutputBinaryFile]:
arguments_for_input = [param_flag, OutputPathPlaceholder(input.name)]
else:
arguments_for_input = [param_flag, InputValuePlaceholder(input.name)]
if is_required:
arguments.extend(arguments_for_input)
else:
arguments.append(
IfPlaceholder(
IfPlaceholderStructure(
condition=IsPresentPlaceholder(input.name),
then_value=arguments_for_input,
)
)
)
if outputs_passed_through_func_return_tuple:
param_flag = "----output-paths"
output_param_var = "_output_paths"
line = '_parser.add_argument("{param_flag}", dest="{param_var}", type=str, nargs={nargs})'.format(
param_flag=param_flag,
param_var=output_param_var,
nargs=len(outputs_passed_through_func_return_tuple),
)
arg_parse_code_lines.append(line)
arguments.append(param_flag)
arguments.extend(
OutputPathPlaceholder(output.name)
for output in outputs_passed_through_func_return_tuple
)
output_serialization_expression_strings = []
for output in outputs_passed_through_func_return_tuple:
serializer_call_str = get_serializer_and_register_definitions(output.type)
output_serialization_expression_strings.append(serializer_call_str)
pre_func_code = "\n".join(list(pre_func_definitions))
arg_parse_code_lines = list(definitions) + arg_parse_code_lines
arg_parse_code_lines.append(
"_parsed_args = vars(_parser.parse_args())",
)
if outputs_passed_through_func_return_tuple:
arg_parse_code_lines.append(
'_output_files = _parsed_args.pop("_output_paths", [])',
)
# Putting singular return values in a list to be "zipped" with the serializers and output paths
outputs_to_list_code = ""
return_ann = inspect.signature(func).return_annotation
if ( # The return type is singular, not sequence
return_ann is not None
and return_ann != inspect.Parameter.empty
and not isinstance(return_ann, dict)
and not hasattr(return_ann, "_fields") # namedtuple
):
outputs_to_list_code = "_outputs = [_outputs]"
output_serialization_code = "".join(
" {},\n".format(s) for s in output_serialization_expression_strings
)
full_output_handling_code = """
{outputs_to_list_code}
_output_serializers = [
{output_serialization_code}
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
""".format(
output_serialization_code=output_serialization_code,
outputs_to_list_code=outputs_to_list_code,
)
full_source = """\
{pre_func_code}
{extra_code}
{func_code}
{arg_parse_code}
_outputs = {func_name}(**_parsed_args)
""".format(
func_name=func.__name__,
func_code=func_code,
pre_func_code=pre_func_code,
extra_code=extra_code,
arg_parse_code="\n".join(arg_parse_code_lines),
)
if outputs_passed_through_func_return_tuple:
full_source += full_output_handling_code
# Removing consecutive blank lines
import re
full_source = re.sub("\n\n\n+", "\n\n", full_source).strip("\n") + "\n"
package_preinstallation_command = []
if packages_to_install:
package_install_command_line = "PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location {}".format(
" ".join([repr(str(package)) for package in packages_to_install])
)
package_preinstallation_command = [
"sh",
"-c",
'({pip_install} || {pip_install} --user) && "$0" "$@"'.format(
pip_install=package_install_command_line
),
]
component_spec.implementation = ContainerImplementation(
container=ContainerSpec(
image=base_image,
command=package_preinstallation_command
+ [
"sh",
"-ec",
# Writing the program code to a file.
# This is needed for Python to show stack traces and for `inspect.getsource` to work (used by PyTorch JIT and this module for example).
textwrap.dedent("""\
program_path=$(mktemp)
echo -n "$0" > "$program_path"
python3 -u "$program_path" "$@"
"""),
full_source,
],
args=arguments,
)
)
return component_spec
|
def _func_to_component_spec(
func,
extra_code="",
base_image: str = None,
packages_to_install: List[str] = None,
modules_to_capture: List[str] = None,
use_code_pickling=False,
) -> ComponentSpec:
"""Takes a self-contained python function and converts it to component.
Args:
func: Required. The function to be converted
base_image: Optional. Docker image to be used as a base image for the python component. Must have python 3.5+ installed. Default is python:3.7
Note: The image can also be specified by decorating the function with the @python_component decorator. If different base images are explicitly specified in both places, an error is raised.
extra_code: Optional. Python source code that gets placed before the function code. Can be used as workaround to define types used in function signature.
packages_to_install: Optional. List of [versioned] python packages to pip install before executing the user function.
modules_to_capture: Optional. List of module names that will be captured (instead of just referencing) during the dependency scan. By default the :code:`func.__module__` is captured.
use_code_pickling: Specifies whether the function code should be captured using pickling as opposed to source code manipulation. Pickling has better support for capturing dependencies, but is sensitive to version mismatch between python in component creation environment and runtime image.
Returns:
A :py:class:`kfp.components.structures.ComponentSpec` instance.
"""
decorator_base_image = getattr(func, "_component_base_image", None)
if decorator_base_image is not None:
if base_image is not None and decorator_base_image != base_image:
raise ValueError(
"base_image ({}) conflicts with the decorator-specified base image metadata ({})".format(
base_image, decorator_base_image
)
)
else:
base_image = decorator_base_image
else:
if base_image is None:
base_image = default_base_image_or_builder
if isinstance(base_image, Callable):
base_image = base_image()
packages_to_install = packages_to_install or []
component_spec = _extract_component_interface(func)
component_inputs = component_spec.inputs or []
component_outputs = component_spec.outputs or []
arguments = []
arguments.extend(InputValuePlaceholder(input.name) for input in component_inputs)
arguments.extend(OutputPathPlaceholder(output.name) for output in component_outputs)
if use_code_pickling:
func_code = _capture_function_code_using_cloudpickle(func, modules_to_capture)
# pip startup is quite slow. TODO: Remove the special cloudpickle installation code in favor of the the following line once a way to speed up pip startup is discovered.
# packages_to_install.append('cloudpickle==1.1.1')
else:
func_code = _capture_function_code_using_source_copy(func)
definitions = set()
def get_deserializer_and_register_definitions(type_name):
deserializer_code = get_deserializer_code_for_type_struct(type_name)
if deserializer_code:
(deserializer_code_str, definition_str) = deserializer_code
if definition_str:
definitions.add(definition_str)
return deserializer_code_str
return "str"
pre_func_definitions = set()
def get_argparse_type_for_input_file(passing_style):
if passing_style is None:
return None
if passing_style is InputPath:
return "str"
elif passing_style is InputTextFile:
return "argparse.FileType('rt')"
elif passing_style is InputBinaryFile:
return "argparse.FileType('rb')"
# For Output* we cannot use the build-in argparse.FileType objects since they do not create parent directories.
elif passing_style is OutputPath:
# ~= return 'str'
pre_func_definitions.add(
inspect.getsource(_make_parent_dirs_and_return_path)
)
return _make_parent_dirs_and_return_path.__name__
elif passing_style is OutputTextFile:
# ~= return "argparse.FileType('wt')"
pre_func_definitions.add(
inspect.getsource(_parent_dirs_maker_that_returns_open_file)
)
return _parent_dirs_maker_that_returns_open_file.__name__ + "('wt')"
elif passing_style is OutputBinaryFile:
# ~= return "argparse.FileType('wb')"
pre_func_definitions.add(
inspect.getsource(_parent_dirs_maker_that_returns_open_file)
)
return _parent_dirs_maker_that_returns_open_file.__name__ + "('wb')"
raise NotImplementedError(
'Unexpected data passing style: "{}".'.format(str(passing_style))
)
def get_serializer_and_register_definitions(type_name) -> str:
serializer_func = get_serializer_func_for_type_struct(type_name)
if serializer_func:
# If serializer is not part of the standard python library, then include its code in the generated program
if hasattr(
serializer_func, "__module__"
) and not _module_is_builtin_or_standard(serializer_func.__module__):
import inspect
serializer_code_str = inspect.getsource(serializer_func)
definitions.add(serializer_code_str)
return serializer_func.__name__
return "str"
arg_parse_code_lines = [
"import argparse",
"_parser = argparse.ArgumentParser(prog={prog_repr}, description={description_repr})".format(
prog_repr=repr(component_spec.name or ""),
description_repr=repr(component_spec.description or ""),
),
]
outputs_passed_through_func_return_tuple = [
output for output in component_outputs if output._passing_style is None
]
file_outputs_passed_using_func_parameters = [
output for output in component_outputs if output._passing_style is not None
]
arguments = []
for input in component_inputs + file_outputs_passed_using_func_parameters:
param_flag = "--" + input.name.replace("_", "-")
is_required = isinstance(input, OutputSpec) or not input.optional
line = '_parser.add_argument("{param_flag}", dest="{param_var}", type={param_type}, required={is_required}, default=argparse.SUPPRESS)'.format(
param_flag=param_flag,
param_var=input._parameter_name, # Not input.name, since the inputs could have been renamed
param_type=get_argparse_type_for_input_file(input._passing_style)
or get_deserializer_and_register_definitions(input.type),
is_required=str(is_required),
)
arg_parse_code_lines.append(line)
if input._passing_style in [InputPath, InputTextFile, InputBinaryFile]:
arguments_for_input = [param_flag, InputPathPlaceholder(input.name)]
elif input._passing_style in [OutputPath, OutputTextFile, OutputBinaryFile]:
arguments_for_input = [param_flag, OutputPathPlaceholder(input.name)]
else:
arguments_for_input = [param_flag, InputValuePlaceholder(input.name)]
if is_required:
arguments.extend(arguments_for_input)
else:
arguments.append(
IfPlaceholder(
IfPlaceholderStructure(
condition=IsPresentPlaceholder(input.name),
then_value=arguments_for_input,
)
)
)
if outputs_passed_through_func_return_tuple:
param_flag = "----output-paths"
output_param_var = "_output_paths"
line = '_parser.add_argument("{param_flag}", dest="{param_var}", type=str, nargs={nargs})'.format(
param_flag=param_flag,
param_var=output_param_var,
nargs=len(outputs_passed_through_func_return_tuple),
)
arg_parse_code_lines.append(line)
arguments.append(param_flag)
arguments.extend(
OutputPathPlaceholder(output.name)
for output in outputs_passed_through_func_return_tuple
)
output_serialization_expression_strings = []
for output in outputs_passed_through_func_return_tuple:
serializer_call_str = get_serializer_and_register_definitions(output.type)
output_serialization_expression_strings.append(serializer_call_str)
pre_func_code = "\n".join(list(pre_func_definitions))
arg_parse_code_lines = list(definitions) + arg_parse_code_lines
arg_parse_code_lines.append(
"_parsed_args = vars(_parser.parse_args())",
)
if outputs_passed_through_func_return_tuple:
arg_parse_code_lines.append(
'_output_files = _parsed_args.pop("_output_paths", [])',
)
# Putting singular return values in a list to be "zipped" with the serializers and output paths
outputs_to_list_code = ""
return_ann = inspect.signature(func).return_annotation
if ( # The return type is singular, not sequence
return_ann is not None
and return_ann != inspect.Parameter.empty
and not isinstance(return_ann, dict)
and not hasattr(return_ann, "_fields") # namedtuple
):
outputs_to_list_code = "_outputs = [_outputs]"
output_serialization_code = "".join(
" {},\n".format(s) for s in output_serialization_expression_strings
)
full_output_handling_code = """
{outputs_to_list_code}
_output_serializers = [
{output_serialization_code}
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
""".format(
output_serialization_code=output_serialization_code,
outputs_to_list_code=outputs_to_list_code,
)
full_source = """\
{pre_func_code}
{extra_code}
{func_code}
{arg_parse_code}
_outputs = {func_name}(**_parsed_args)
""".format(
func_name=func.__name__,
func_code=func_code,
pre_func_code=pre_func_code,
extra_code=extra_code,
arg_parse_code="\n".join(arg_parse_code_lines),
)
if outputs_passed_through_func_return_tuple:
full_source += full_output_handling_code
# Removing consecutive blank lines
import re
full_source = re.sub("\n\n\n+", "\n\n", full_source).strip("\n") + "\n"
package_preinstallation_command = []
if packages_to_install:
package_install_command_line = "PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location {}".format(
" ".join([repr(str(package)) for package in packages_to_install])
)
package_preinstallation_command = [
"sh",
"-c",
'({pip_install} || {pip_install} --user) && "$0" "$@"'.format(
pip_install=package_install_command_line
),
]
component_spec.implementation = ContainerImplementation(
container=ContainerSpec(
image=base_image,
command=package_preinstallation_command
+ ["python3", "-u", "-c", full_source],
args=arguments,
)
)
return component_spec
|
https://github.com/kubeflow/pipelines/issues/4849
|
Traceback (most recent call last):
File "/opt/conda/lib/python3.8/site-packages/torch/_utils_internal.py", line 49, in get_source_lines_and_file
sourcelines, file_lineno = inspect.getsourcelines(obj)
File "/opt/conda/lib/python3.8/inspect.py", line 967, in getsourcelines
lines, lnum = findsource(object)
File "/opt/conda/lib/python3.8/inspect.py", line 798, in findsource
raise OSError('could not get source code')
OSError: could not get source code
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "<string>", line 55, in <module>
File "<string>", line 44, in create_fully_connected_pytorch_network
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_script.py", line 897, in script
return torch.jit._recursive.create_script_module(
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 352, in create_script_module
return create_script_module_impl(nn_module, concrete_type, stubs_fn)
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 364, in create_script_module_impl
method_stubs = stubs_fn(nn_module)
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 591, in infer_methods_to_compile
stubs.append(make_stub_from_method(nn_module, method))
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 52, in make_stub_from_method
return make_stub(func, method_name)
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 37, in make_stub
ast = get_jit_def(func, name, self_name="RecursiveScriptModule")
File "/opt/conda/lib/python3.8/site-packages/torch/jit/frontend.py", line 197, in get_jit_def
sourcelines, file_lineno, filename = get_source_lines_and_file(fn, torch._C.ErrorReport.call_stack())
File "/opt/conda/lib/python3.8/site-packages/torch/_utils_internal.py", line 56, in get_source_lines_and_file
raise OSError(msg) from e
OSError: Can't get source for <bound method create_fully_connected_pytorch_network.<locals>.FullyConnectedNetwork.forward of FullyConnectedNetwork(
(layers): ModuleList(
(0): Linear(in_features=10, out_features=100, bias=True)
(1): Linear(in_features=100, out_features=10, bias=True)
(2): Linear(in_features=10, out_features=1, bias=True)
)
)>. TorchScript requires source access in order to carry out compilation, make sure original .py files are available.
|
OSError
|
def build_python_component(
component_func,
target_image,
base_image=None,
dependency=[],
staging_gcs_path=None,
timeout=600,
namespace=None,
target_component_file=None,
python_version="python3",
):
"""build_component automatically builds a container image for the component_func based on the base_image and pushes to the target_image.
Args:
component_func (python function): The python function to build components upon
base_image (str): Docker image to use as a base image
target_image (str): Full URI to push the target image
staging_gcs_path (str): GCS blob that can store temporary build files
target_image (str): target image path
timeout (int): the timeout for the image build(in secs), default is 600 seconds
namespace (str): the namespace within which to run the kubernetes kaniko job. If the
job is running on GKE and value is None the underlying functions will use the default namespace from GKE. .
dependency (list): a list of VersionedDependency, which includes the package name and versions, default is empty
python_version (str): choose python2 or python3, default is python3
Raises:
ValueError: The function is not decorated with python_component decorator or the python_version is neither python2 nor python3
"""
_configure_logger(logging.getLogger())
if component_func is None:
raise ValueError("component_func must not be None")
if target_image is None:
raise ValueError("target_image must not be None")
if python_version not in ["python2", "python3"]:
raise ValueError("python_version has to be either python2 or python3")
if staging_gcs_path is None:
raise ValueError("staging_gcs_path must not be None")
if base_image is None:
base_image = getattr(component_func, "_component_base_image", None)
if base_image is None:
from ..components._python_op import default_base_image_or_builder
base_image = default_base_image_or_builder
if isinstance(base_image, Callable):
base_image = base_image()
logging.info(
"Build an image that is based on "
+ base_image
+ " and push the image to "
+ target_image
)
component_spec = _func_to_component_spec(component_func, base_image=base_image)
command_line_args = component_spec.implementation.container.command
program_launcher_index = command_line_args.index(
'program_path=$(mktemp)\necho -n "$0" > "$program_path"\npython3 -u "$program_path" "$@"\n'
)
assert program_launcher_index in [2, 3]
program_code_index = program_launcher_index + 1
program_code = command_line_args[program_code_index]
program_rel_path = "ml/main.py"
program_container_path = "/" + program_rel_path
# Replacing the inline code with calling a local program
# Before: sh -ec '... && python3 -u ...' 'import sys ...' --param1 ...
# After: python3 -u main.py --param1 ...
command_line_args[program_code_index] = program_container_path
command_line_args.pop(program_launcher_index)
command_line_args[program_launcher_index - 1] = "-u" # -ec => -u
command_line_args[program_launcher_index - 2] = python_version # sh => python3
if python_version == "python2":
import warnings
warnings.warn("Python2 is not longer supported")
arc_docker_filename = "Dockerfile"
arc_requirement_filename = "requirements.txt"
with tempfile.TemporaryDirectory() as local_build_dir:
# Write the program code to a file in the context directory
local_python_filepath = os.path.join(local_build_dir, program_rel_path)
os.makedirs(os.path.dirname(local_python_filepath), exist_ok=True)
with open(local_python_filepath, "w") as f:
f.write(program_code)
# Generate the python package requirements file in the context directory
local_requirement_filepath = os.path.join(
local_build_dir, arc_requirement_filename
)
_dependency_to_requirements(dependency, local_requirement_filepath)
# Generate Dockerfile in the context directory
local_docker_filepath = os.path.join(local_build_dir, arc_docker_filename)
_generate_dockerfile(
local_docker_filepath,
base_image,
python_version,
arc_requirement_filename,
add_files={program_rel_path: program_container_path},
)
logging.info("Building and pushing container image.")
container_builder = ContainerBuilder(staging_gcs_path, target_image, namespace)
image_name_with_digest = container_builder.build(
local_build_dir, arc_docker_filename, target_image, timeout
)
component_spec.implementation.container.image = image_name_with_digest
# Optionally writing the component definition to a local file for sharing
target_component_file = target_component_file or getattr(
component_func, "_component_target_component_file", None
)
if target_component_file:
component_spec.save(target_component_file)
task_factory_function = _create_task_factory_from_component_spec(component_spec)
return task_factory_function
|
def build_python_component(
component_func,
target_image,
base_image=None,
dependency=[],
staging_gcs_path=None,
timeout=600,
namespace=None,
target_component_file=None,
python_version="python3",
):
"""build_component automatically builds a container image for the component_func based on the base_image and pushes to the target_image.
Args:
component_func (python function): The python function to build components upon
base_image (str): Docker image to use as a base image
target_image (str): Full URI to push the target image
staging_gcs_path (str): GCS blob that can store temporary build files
target_image (str): target image path
timeout (int): the timeout for the image build(in secs), default is 600 seconds
namespace (str): the namespace within which to run the kubernetes kaniko job. If the
job is running on GKE and value is None the underlying functions will use the default namespace from GKE. .
dependency (list): a list of VersionedDependency, which includes the package name and versions, default is empty
python_version (str): choose python2 or python3, default is python3
Raises:
ValueError: The function is not decorated with python_component decorator or the python_version is neither python2 nor python3
"""
_configure_logger(logging.getLogger())
if component_func is None:
raise ValueError("component_func must not be None")
if target_image is None:
raise ValueError("target_image must not be None")
if python_version not in ["python2", "python3"]:
raise ValueError("python_version has to be either python2 or python3")
if staging_gcs_path is None:
raise ValueError("staging_gcs_path must not be None")
if base_image is None:
base_image = getattr(component_func, "_component_base_image", None)
if base_image is None:
from ..components._python_op import default_base_image_or_builder
base_image = default_base_image_or_builder
if isinstance(base_image, Callable):
base_image = base_image()
logging.info(
"Build an image that is based on "
+ base_image
+ " and push the image to "
+ target_image
)
component_spec = _func_to_component_spec(component_func, base_image=base_image)
command_line_args = component_spec.implementation.container.command
dash_c_index = command_line_args.index("-c")
program_code_index = dash_c_index + 1
program_code = command_line_args[program_code_index]
program_rel_path = "ml/main.py"
program_container_path = "/" + program_rel_path
# Replacing the inline code with calling a local program
# Before: python3 -u -c 'import sys ...' --param1 ...
# After: python3 -u main.py --param1 ...
command_line_args[program_code_index] = program_container_path
command_line_args.pop(dash_c_index)
if python_version == "python2":
import warnings
warnings.warn("Python2 is not longer supported")
# Replacing the python interpreter
python_interpreter_index = command_line_args.index("python3")
command_line_args[python_interpreter_index] = python_version
arc_docker_filename = "Dockerfile"
arc_requirement_filename = "requirements.txt"
with tempfile.TemporaryDirectory() as local_build_dir:
# Write the program code to a file in the context directory
local_python_filepath = os.path.join(local_build_dir, program_rel_path)
os.makedirs(os.path.dirname(local_python_filepath), exist_ok=True)
with open(local_python_filepath, "w") as f:
f.write(program_code)
# Generate the python package requirements file in the context directory
local_requirement_filepath = os.path.join(
local_build_dir, arc_requirement_filename
)
_dependency_to_requirements(dependency, local_requirement_filepath)
# Generate Dockerfile in the context directory
local_docker_filepath = os.path.join(local_build_dir, arc_docker_filename)
_generate_dockerfile(
local_docker_filepath,
base_image,
python_version,
arc_requirement_filename,
add_files={program_rel_path: program_container_path},
)
logging.info("Building and pushing container image.")
container_builder = ContainerBuilder(staging_gcs_path, target_image, namespace)
image_name_with_digest = container_builder.build(
local_build_dir, arc_docker_filename, target_image, timeout
)
component_spec.implementation.container.image = image_name_with_digest
# Optionally writing the component definition to a local file for sharing
target_component_file = target_component_file or getattr(
component_func, "_component_target_component_file", None
)
if target_component_file:
component_spec.save(target_component_file)
task_factory_function = _create_task_factory_from_component_spec(component_spec)
return task_factory_function
|
https://github.com/kubeflow/pipelines/issues/4849
|
Traceback (most recent call last):
File "/opt/conda/lib/python3.8/site-packages/torch/_utils_internal.py", line 49, in get_source_lines_and_file
sourcelines, file_lineno = inspect.getsourcelines(obj)
File "/opt/conda/lib/python3.8/inspect.py", line 967, in getsourcelines
lines, lnum = findsource(object)
File "/opt/conda/lib/python3.8/inspect.py", line 798, in findsource
raise OSError('could not get source code')
OSError: could not get source code
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "<string>", line 55, in <module>
File "<string>", line 44, in create_fully_connected_pytorch_network
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_script.py", line 897, in script
return torch.jit._recursive.create_script_module(
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 352, in create_script_module
return create_script_module_impl(nn_module, concrete_type, stubs_fn)
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 364, in create_script_module_impl
method_stubs = stubs_fn(nn_module)
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 591, in infer_methods_to_compile
stubs.append(make_stub_from_method(nn_module, method))
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 52, in make_stub_from_method
return make_stub(func, method_name)
File "/opt/conda/lib/python3.8/site-packages/torch/jit/_recursive.py", line 37, in make_stub
ast = get_jit_def(func, name, self_name="RecursiveScriptModule")
File "/opt/conda/lib/python3.8/site-packages/torch/jit/frontend.py", line 197, in get_jit_def
sourcelines, file_lineno, filename = get_source_lines_and_file(fn, torch._C.ErrorReport.call_stack())
File "/opt/conda/lib/python3.8/site-packages/torch/_utils_internal.py", line 56, in get_source_lines_and_file
raise OSError(msg) from e
OSError: Can't get source for <bound method create_fully_connected_pytorch_network.<locals>.FullyConnectedNetwork.forward of FullyConnectedNetwork(
(layers): ModuleList(
(0): Linear(in_features=10, out_features=100, bias=True)
(1): Linear(in_features=100, out_features=10, bias=True)
(2): Linear(in_features=10, out_features=1, bias=True)
)
)>. TorchScript requires source access in order to carry out compilation, make sure original .py files are available.
|
OSError
|
def batch_predict(
project_id,
model_path,
input_paths,
input_data_format,
output_path,
region,
job_id_output_path,
output_data_format=None,
prediction_input=None,
job_id_prefix=None,
wait_interval=30,
):
"""Creates a MLEngine batch prediction job.
Args:
project_id (str): Required. The ID of the parent project of the job.
model_path (str): Required. The path to the model. It can be either:
`projects/[PROJECT_ID]/models/[MODEL_ID]` or
`projects/[PROJECT_ID]/models/[MODEL_ID]/versions/[VERSION_ID]`
or a GCS path of a model file.
input_paths (list): Required. The Google Cloud Storage location of
the input data files. May contain wildcards.
input_data_format (str): Required. The format of the input data files.
See https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#DataFormat.
output_path (str): Required. The output Google Cloud Storage location.
region (str): Required. The Google Compute Engine region to run the
prediction job in.
output_data_format (str): Optional. Format of the output data files,
defaults to JSON.
prediction_input (dict): Input parameters to create a prediction job.
job_id_prefix (str): the prefix of the generated job id.
wait_interval (int): optional wait interval between calls
to get job status. Defaults to 30.
"""
if not prediction_input:
prediction_input = {}
if not model_path:
raise ValueError("model_path must be provided.")
if _is_model_name(model_path):
prediction_input["modelName"] = model_path
elif _is_model_version_name(model_path):
prediction_input["versionName"] = model_path
elif _is_gcs_path(model_path):
prediction_input["uri"] = model_path
else:
raise ValueError("model_path value is invalid.")
if input_paths:
prediction_input["inputPaths"] = input_paths
if input_data_format:
prediction_input["dataFormat"] = input_data_format
if output_path:
prediction_input["outputPath"] = output_path
if output_data_format:
prediction_input["outputDataFormat"] = output_data_format
if region:
prediction_input["region"] = region
job = {"predictionInput": prediction_input}
create_job(
project_id=project_id,
job=job,
job_id_prefix=job_id_prefix,
wait_interval=wait_interval,
job_id_output_path=job_id_output_path,
)
|
def batch_predict(
project_id,
model_path,
input_paths,
input_data_format,
output_path,
region,
job_id_output_path,
output_data_format=None,
prediction_input=None,
job_id_prefix=None,
wait_interval=30,
):
"""Creates a MLEngine batch prediction job.
Args:
project_id (str): Required. The ID of the parent project of the job.
model_path (str): Required. The path to the model. It can be either:
`projects/[PROJECT_ID]/models/[MODEL_ID]` or
`projects/[PROJECT_ID]/models/[MODEL_ID]/versions/[VERSION_ID]`
or a GCS path of a model file.
input_paths (list): Required. The Google Cloud Storage location of
the input data files. May contain wildcards.
input_data_format (str): Required. The format of the input data files.
See https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#DataFormat.
output_path (str): Required. The output Google Cloud Storage location.
region (str): Required. The Google Compute Engine region to run the
prediction job in.
output_data_format (str): Optional. Format of the output data files,
defaults to JSON.
prediction_input (dict): Input parameters to create a prediction job.
job_id_prefix (str): the prefix of the generated job id.
wait_interval (int): optional wait interval between calls
to get job status. Defaults to 30.
"""
if not prediction_input:
prediction_input = {}
if not model_path:
raise ValueError("model_path must be provided.")
if _is_model_name(model_path):
prediction_input["modelName"] = model_path
elif _is_model_version_name(model_path):
prediction_input["versionName"] = model_path
elif _is_gcs_path(model_path):
prediction_input["uri"] = model_path
else:
raise ValueError("model_path value is invalid.")
if input_paths:
prediction_input["inputPaths"] = input_paths
if input_data_format:
prediction_input["dataFormat"] = input_data_format
if output_path:
prediction_input["outputPath"] = output_path
if output_data_format:
prediction_input["outputDataFormat"] = output_data_format
if region:
prediction_input["region"] = region
job = {"predictionInput": prediction_input}
create_job(
project_id,
job,
job_id_prefix,
wait_interval,
job_id_output_path=job_id_output_path,
)
|
https://github.com/kubeflow/pipelines/issues/4430
|
INFO:root:Start KFP context with ID: 858b3ff01cdeed5c0b0b7fd9d2655641.
Traceback (most recent call last):
File "/usr/local/lib/python2.7/runpy.py", line 174, in _run_module_as_main
"__main__", fname, loader, pkg_name)
File "/usr/local/lib/python2.7/runpy.py", line 72, in _run_code
exec code in run_globals
File "/ml/kfp_component/launcher/__main__.py", line 45, in <module>
main()
File "/ml/kfp_component/launcher/__main__.py", line 42, in main
launch(args.file_or_module, args.args)
File "kfp_component/launcher/launcher.py", line 45, in launch
return fire.Fire(module, command=args, name=module.__name__)
File "/usr/local/lib/python2.7/site-packages/fire/core.py", line 127, in Fire
component_trace = _Fire(component, args, context, name)
File "/usr/local/lib/python2.7/site-packages/fire/core.py", line 366, in _Fire
component, remaining_args)
File "/usr/local/lib/python2.7/site-packages/fire/core.py", line 542, in _CallCallable
result = fn(*varargs, **kwargs)
File "kfp_component/google/ml_engine/_batch_predict.py", line 70, in batch_predict
create_job(project_id, job, job_id_prefix, wait_interval)
File "kfp_component/google/ml_engine/_create_job.py", line 48, in create_job
return CreateJobOp(project_id, job, job_id_prefix, job_id, wait_interval
File "kfp_component/google/ml_engine/_create_job.py", line 63, in execute_and_wait
self._set_job_id(ctx.context_id())
File "kfp_component/google/ml_engine/_create_job.py", line 74, in _set_job_id
job_id = gcp_common.normalize_name(job_id)
File "kfp_component/google/common/_utils.py", line 45, in normalize_name
invalid_char_placeholder, name)
File "/usr/local/lib/python2.7/re.py", line 155, in sub
return _compile(pattern, flags).sub(repl, string, count)
TypeError: expected string or buffer
|
TypeError
|
def train(
project_id,
job_id_output_path,
python_module=None,
package_uris=None,
region=None,
args=None,
job_dir=None,
python_version=None,
runtime_version=None,
master_image_uri=None,
worker_image_uri=None,
training_input=None,
job_id_prefix=None,
job_id=None,
wait_interval=30,
):
"""Creates a MLEngine training job.
Args:
project_id (str): Required. The ID of the parent project of the job.
python_module (str): Required. The Python module name to run after
installing the packages.
package_uris (list): Required. The Google Cloud Storage location of
the packages with the training program and any additional
dependencies. The maximum number of package URIs is 100.
region (str): Required. The Google Compute Engine region to run the
training job in
args (list): Command line arguments to pass to the program.
job_dir (str): A Google Cloud Storage path in which to store training
outputs and other data needed for training. This path is passed
to your TensorFlow program as the '--job-dir' command-line
argument. The benefit of specifying this field is that Cloud ML
validates the path for use in training.
python_version (str): Optional. The version of Python used in training.
If not set, the default version is '2.7'. Python '3.5' is
available when runtimeVersion is set to '1.4' and above.
Python '2.7' works with all supported runtime versions.
runtime_version (str): Optional. The Cloud ML Engine runtime version
to use for training. If not set, Cloud ML Engine uses the
default stable version, 1.0.
master_image_uri (str): The Docker image to run on the master replica.
This image must be in Container Registry.
worker_image_uri (str): The Docker image to run on the worker replica.
This image must be in Container Registry.
training_input (dict): Input parameters to create a training job.
job_id_prefix (str): the prefix of the generated job id.
job_id (str): the created job_id, takes precedence over generated job
id if set.
wait_interval (int): optional wait interval between calls
to get job status. Defaults to 30.
"""
if not training_input:
training_input = {}
if python_module:
training_input["pythonModule"] = python_module
if package_uris:
training_input["packageUris"] = package_uris
if region:
training_input["region"] = region
if args:
training_input["args"] = args
if job_dir:
training_input["jobDir"] = job_dir
if python_version:
training_input["pythonVersion"] = python_version
if runtime_version:
training_input["runtimeVersion"] = runtime_version
if master_image_uri:
if "masterConfig" not in training_input:
training_input["masterConfig"] = {}
training_input["masterConfig"]["imageUri"] = master_image_uri
if worker_image_uri:
if "workerConfig" not in training_input:
training_input["workerConfig"] = {}
training_input["workerConfig"]["imageUri"] = worker_image_uri
job = {"trainingInput": training_input}
return create_job(
project_id=project_id,
job=job,
job_id_prefix=job_id_prefix,
job_id=job_id,
wait_interval=wait_interval,
job_id_output_path=job_id_output_path,
)
|
def train(
project_id,
job_id_output_path,
python_module=None,
package_uris=None,
region=None,
args=None,
job_dir=None,
python_version=None,
runtime_version=None,
master_image_uri=None,
worker_image_uri=None,
training_input=None,
job_id_prefix=None,
job_id=None,
wait_interval=30,
):
"""Creates a MLEngine training job.
Args:
project_id (str): Required. The ID of the parent project of the job.
python_module (str): Required. The Python module name to run after
installing the packages.
package_uris (list): Required. The Google Cloud Storage location of
the packages with the training program and any additional
dependencies. The maximum number of package URIs is 100.
region (str): Required. The Google Compute Engine region to run the
training job in
args (list): Command line arguments to pass to the program.
job_dir (str): A Google Cloud Storage path in which to store training
outputs and other data needed for training. This path is passed
to your TensorFlow program as the '--job-dir' command-line
argument. The benefit of specifying this field is that Cloud ML
validates the path for use in training.
python_version (str): Optional. The version of Python used in training.
If not set, the default version is '2.7'. Python '3.5' is
available when runtimeVersion is set to '1.4' and above.
Python '2.7' works with all supported runtime versions.
runtime_version (str): Optional. The Cloud ML Engine runtime version
to use for training. If not set, Cloud ML Engine uses the
default stable version, 1.0.
master_image_uri (str): The Docker image to run on the master replica.
This image must be in Container Registry.
worker_image_uri (str): The Docker image to run on the worker replica.
This image must be in Container Registry.
training_input (dict): Input parameters to create a training job.
job_id_prefix (str): the prefix of the generated job id.
job_id (str): the created job_id, takes precedence over generated job
id if set.
wait_interval (int): optional wait interval between calls
to get job status. Defaults to 30.
"""
if not training_input:
training_input = {}
if python_module:
training_input["pythonModule"] = python_module
if package_uris:
training_input["packageUris"] = package_uris
if region:
training_input["region"] = region
if args:
training_input["args"] = args
if job_dir:
training_input["jobDir"] = job_dir
if python_version:
training_input["pythonVersion"] = python_version
if runtime_version:
training_input["runtimeVersion"] = runtime_version
if master_image_uri:
if "masterConfig" not in training_input:
training_input["masterConfig"] = {}
training_input["masterConfig"]["imageUri"] = master_image_uri
if worker_image_uri:
if "workerConfig" not in training_input:
training_input["workerConfig"] = {}
training_input["workerConfig"]["imageUri"] = worker_image_uri
job = {"trainingInput": training_input}
return create_job(
project_id,
job,
job_id_prefix,
job_id,
wait_interval,
job_id_output_path=job_id_output_path,
)
|
https://github.com/kubeflow/pipelines/issues/4430
|
INFO:root:Start KFP context with ID: 858b3ff01cdeed5c0b0b7fd9d2655641.
Traceback (most recent call last):
File "/usr/local/lib/python2.7/runpy.py", line 174, in _run_module_as_main
"__main__", fname, loader, pkg_name)
File "/usr/local/lib/python2.7/runpy.py", line 72, in _run_code
exec code in run_globals
File "/ml/kfp_component/launcher/__main__.py", line 45, in <module>
main()
File "/ml/kfp_component/launcher/__main__.py", line 42, in main
launch(args.file_or_module, args.args)
File "kfp_component/launcher/launcher.py", line 45, in launch
return fire.Fire(module, command=args, name=module.__name__)
File "/usr/local/lib/python2.7/site-packages/fire/core.py", line 127, in Fire
component_trace = _Fire(component, args, context, name)
File "/usr/local/lib/python2.7/site-packages/fire/core.py", line 366, in _Fire
component, remaining_args)
File "/usr/local/lib/python2.7/site-packages/fire/core.py", line 542, in _CallCallable
result = fn(*varargs, **kwargs)
File "kfp_component/google/ml_engine/_batch_predict.py", line 70, in batch_predict
create_job(project_id, job, job_id_prefix, wait_interval)
File "kfp_component/google/ml_engine/_create_job.py", line 48, in create_job
return CreateJobOp(project_id, job, job_id_prefix, job_id, wait_interval
File "kfp_component/google/ml_engine/_create_job.py", line 63, in execute_and_wait
self._set_job_id(ctx.context_id())
File "kfp_component/google/ml_engine/_create_job.py", line 74, in _set_job_id
job_id = gcp_common.normalize_name(job_id)
File "kfp_component/google/common/_utils.py", line 45, in normalize_name
invalid_char_placeholder, name)
File "/usr/local/lib/python2.7/re.py", line 155, in sub
return _compile(pattern, flags).sub(repl, string, count)
TypeError: expected string or buffer
|
TypeError
|
def create_version(
model_name,
deployemnt_uri=None,
version_id=None,
runtime_version=None,
python_version=None,
version=None,
replace_existing=False,
wait_interval=30,
version_name_output_path="/tmp/kfp/output/ml_engine/version_name.txt",
version_object_output_path="/tmp/kfp/output/ml_engine/version.json",
):
"""Creates a MLEngine version and wait for the operation to be done.
Args:
model_name (str): required, the name of the parent model.
deployment_uri (str): optional, the Google Cloud Storage location of
the trained model used to create the version.
version_id (str): optional, the user provided short name of
the version. If it is not provided, the operation uses a random name.
runtime_version (str): optinal, the Cloud ML Engine runtime version
to use for this deployment. If not set, Cloud ML Engine uses
the default stable version, 1.0.
python_version (str): optinal, the version of Python used in prediction.
If not set, the default version is '2.7'. Python '3.5' is available
when runtimeVersion is set to '1.4' and above. Python '2.7' works
with all supported runtime versions.
version (dict): optional, the payload of the new version.
replace_existing (boolean): boolean flag indicates whether to replace
existing version in case of conflict.
wait_interval (int): the interval to wait for a long running operation.
"""
if not version:
version = {}
if deployemnt_uri:
version["deploymentUri"] = deployemnt_uri
if version_id:
version["name"] = version_id
if runtime_version:
version["runtimeVersion"] = runtime_version
if python_version:
version["pythonVersion"] = python_version
return CreateVersionOp(
model_name,
version,
replace_existing,
wait_interval,
version_name_output_path=version_name_output_path,
version_object_output_path=version_object_output_path,
).execute_and_wait()
|
def create_version(
model_name,
deployemnt_uri=None,
version_id=None,
runtime_version=None,
python_version=None,
version=None,
replace_existing=False,
wait_interval=30,
version_name_output_path="/tmp/kfp/output/ml_engine/version_name.txt",
version_object_output_path="/tmp/kfp/output/ml_engine/version.json",
):
"""Creates a MLEngine version and wait for the operation to be done.
Args:
model_name (str): required, the name of the parent model.
deployment_uri (str): optional, the Google Cloud Storage location of
the trained model used to create the version.
version_id (str): optional, the user provided short name of
the version. If it is not provided, the operation uses a random name.
runtime_version (str): optinal, the Cloud ML Engine runtime version
to use for this deployment. If not set, Cloud ML Engine uses
the default stable version, 1.0.
python_version (str): optinal, the version of Python used in prediction.
If not set, the default version is '2.7'. Python '3.5' is available
when runtimeVersion is set to '1.4' and above. Python '2.7' works
with all supported runtime versions.
version (str): optional, the payload of the new version.
replace_existing (boolean): boolean flag indicates whether to replace
existing version in case of conflict.
wait_interval (int): the interval to wait for a long running operation.
"""
if not version:
version = {}
if deployemnt_uri:
version["deploymentUri"] = deployemnt_uri
if version_id:
version["name"] = version_id
if runtime_version:
version["runtimeVersion"] = runtime_version
if python_version:
version["pythonVersion"] = python_version
return CreateVersionOp(
model_name,
version,
replace_existing,
wait_interval,
version_name_output_path=version_name_output_path,
version_object_output_path=version_object_output_path,
).execute_and_wait()
|
https://github.com/kubeflow/pipelines/issues/4291
|
Traceback (most recent call last):
File "/usr/local/lib/python2.7/runpy.py", line 174, in _run_module_as_main
"__main__", fname, loader, pkg_name)
File "/usr/local/lib/python2.7/runpy.py", line 72, in _run_code
exec code in run_globals
File "/ml/kfp_component/launcher/__main__.py", line 34, in <module>
main()
File "/ml/kfp_component/launcher/__main__.py", line 31, in main
launch(args.file_or_module, args.args)
File "kfp_component/launcher/launcher.py", line 45, in launch
return fire.Fire(module, command=args, name=module.__name__)
File "/usr/local/lib/python2.7/site-packages/fire/core.py", line 127, in Fire
component_trace = _Fire(component, args, context, name)
File "/usr/local/lib/python2.7/site-packages/fire/core.py", line 366, in _Fire
component, remaining_args)
File "/usr/local/lib/python2.7/site-packages/fire/core.py", line 542, in _CallCallable
result = fn(*varargs, **kwargs)
File "kfp_component/google/ml_engine/_deploy.py", line 68, in deploy
wait_interval)
File "kfp_component/google/ml_engine/_create_version.py", line 55, in create_version
version['deploymentUri'] = deployemnt_uri
TypeError: 'str' object does not support item assignment
|
TypeError
|
def _load_config(
self,
host,
client_id,
namespace,
other_client_id,
other_client_secret,
existing_token,
proxy,
ssl_ca_cert,
):
config = kfp_server_api.configuration.Configuration()
if proxy:
# https://github.com/kubeflow/pipelines/blob/c6ac5e0b1fd991e19e96419f0f508ec0a4217c29/backend/api/python_http_client/kfp_server_api/rest.py#L100
config.proxy = proxy
if ssl_ca_cert:
config.ssl_ca_cert = ssl_ca_cert
host = host or ""
# Preprocess the host endpoint to prevent some common user mistakes.
if not client_id:
# always preserving the protocol (http://localhost requires it)
host = host.rstrip("/")
if host:
config.host = host
token = None
# "existing_token" is designed to accept token generated outside of SDK. Here is an example.
#
# https://cloud.google.com/functions/docs/securing/function-identity
# https://cloud.google.com/endpoints/docs/grpc/service-account-authentication
#
# import requests
# import kfp
#
# def get_access_token():
# url = 'http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/token'
# r = requests.get(url, headers={'Metadata-Flavor': 'Google'})
# r.raise_for_status()
# access_token = r.json()['access_token']
# return access_token
#
# client = kfp.Client(host='<KFPHost>', existing_token=get_access_token())
#
if existing_token:
token = existing_token
self._is_refresh_token = False
elif client_id:
token = get_auth_token(client_id, other_client_id, other_client_secret)
self._is_refresh_token = True
elif self._is_inverse_proxy_host(host):
token = get_gcp_access_token()
self._is_refresh_token = False
if token:
config.api_key["authorization"] = token
config.api_key_prefix["authorization"] = "Bearer"
return config
if host:
# if host is explicitly set with auth token, it's probably a port forward address.
return config
import kubernetes as k8s
in_cluster = True
try:
k8s.config.load_incluster_config()
except:
in_cluster = False
pass
if in_cluster:
config.host = Client.IN_CLUSTER_DNS_NAME.format(namespace)
return config
try:
k8s.config.load_kube_config(client_configuration=config)
except:
print("Failed to load kube config.")
return config
if config.host:
config.host = config.host + "/" + Client.KUBE_PROXY_PATH.format(namespace)
return config
|
def _load_config(
self,
host,
client_id,
namespace,
other_client_id,
other_client_secret,
existing_token,
proxy,
ssl_ca_cert,
):
config = kfp_server_api.configuration.Configuration()
if proxy:
# https://github.com/kubeflow/pipelines/blob/c6ac5e0b1fd991e19e96419f0f508ec0a4217c29/backend/api/python_http_client/kfp_server_api/rest.py#L100
config.proxy = proxy
if ssl_ca_cert:
config.ssl_ca_cert = ssl_ca_cert
host = host or ""
# Preprocess the host endpoint to prevent some common user mistakes.
# This should only be done for non-IAP cases (when client_id is None). IAP requires preserving the protocol.
if not client_id:
# Per feedback in proxy env, http or https is still required
if not proxy:
host = re.sub(r"^(http|https)://", "", host)
host = host.rstrip("/")
if host:
config.host = host
token = None
# "existing_token" is designed to accept token generated outside of SDK. Here is an example.
#
# https://cloud.google.com/functions/docs/securing/function-identity
# https://cloud.google.com/endpoints/docs/grpc/service-account-authentication
#
# import requests
# import kfp
#
# def get_access_token():
# url = 'http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/token'
# r = requests.get(url, headers={'Metadata-Flavor': 'Google'})
# r.raise_for_status()
# access_token = r.json()['access_token']
# return access_token
#
# client = kfp.Client(host='<KFPHost>', existing_token=get_access_token())
#
if existing_token:
token = existing_token
self._is_refresh_token = False
elif client_id:
token = get_auth_token(client_id, other_client_id, other_client_secret)
self._is_refresh_token = True
elif self._is_inverse_proxy_host(host):
token = get_gcp_access_token()
self._is_refresh_token = False
if token:
config.api_key["authorization"] = token
config.api_key_prefix["authorization"] = "Bearer"
return config
if host:
# if host is explicitly set with auth token, it's probably a port forward address.
return config
import kubernetes as k8s
in_cluster = True
try:
k8s.config.load_incluster_config()
except:
in_cluster = False
pass
if in_cluster:
config.host = Client.IN_CLUSTER_DNS_NAME.format(namespace)
return config
try:
k8s.config.load_kube_config(client_configuration=config)
except:
print("Failed to load kube config.")
return config
if config.host:
config.host = config.host + "/" + Client.KUBE_PROXY_PATH.format(namespace)
return config
|
https://github.com/kubeflow/pipelines/issues/4277
|
Traceback (most recent call last):
File "../kubeflow-apply/kubeflow-apply.py", line 73, in <module>
args.func()
File "../kubeflow-apply/kubeflow-apply.py", line 33, in pipelines
self.client.upload_pipeline(filename, name, description)
File "/usr/local/lib/python3.8/site-packages/kfp/_client.py", line 723, in upload_pipeline
response = self._upload_api.upload_pipeline(pipeline_package_path, name=pipeline_name, description=description)
File "/usr/local/lib/python3.8/site-packages/kfp_server_api/api/pipeline_upload_service_api.py", line 83, in upload_pipeline
return self.upload_pipeline_with_http_info(uploadfile, **kwargs) # noqa: E501
File "/usr/local/lib/python3.8/site-packages/kfp_server_api/api/pipeline_upload_service_api.py", line 177, in upload_pipeline_with_http_info
return self.api_client.call_api(
File "/usr/local/lib/python3.8/site-packages/kfp_server_api/api_client.py", line 379, in call_api
return self.__call_api(resource_path, method,
File "/usr/local/lib/python3.8/site-packages/kfp_server_api/api_client.py", line 196, in __call_api
response_data = self.request(
File "/usr/local/lib/python3.8/site-packages/kfp_server_api/api_client.py", line 422, in request
return self.rest_client.POST(url,
File "/usr/local/lib/python3.8/site-packages/kfp_server_api/rest.py", line 279, in POST
return self.request("POST", url,
File "/usr/local/lib/python3.8/site-packages/kfp_server_api/rest.py", line 196, in request
r = self.pool_manager.request(
File "/usr/local/lib/python3.8/site-packages/urllib3/request.py", line 79, in request
return self.request_encode_body(
File "/usr/local/lib/python3.8/site-packages/urllib3/request.py", line 172, in request_encode_body
return self.urlopen(method, url, **extra_kw)
File "/usr/local/lib/python3.8/site-packages/urllib3/poolmanager.py", line 326, in urlopen
conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
File "/usr/local/lib/python3.8/site-packages/urllib3/poolmanager.py", line 231, in connection_from_host
raise LocationValueError("No host specified.")
urllib3.exceptions.LocationValueError: No host specified.
|
urllib3.exceptions.LocationValueError
|
def _load_config(
self,
host,
client_id,
namespace,
other_client_id,
other_client_secret,
existing_token,
):
config = kfp_server_api.configuration.Configuration()
host = host or ""
# Preprocess the host endpoint to prevent some common user mistakes.
if not client_id:
# always preserving the protocol (http://localhost requires it)
host = host.rstrip("/")
if host:
config.host = host
token = None
# "existing_token" is designed to accept token generated outside of SDK. Here is an example.
#
# https://cloud.google.com/functions/docs/securing/function-identity
# https://cloud.google.com/endpoints/docs/grpc/service-account-authentication
#
# import requests
# import kfp
#
# def get_access_token():
# url = 'http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/token'
# r = requests.get(url, headers={'Metadata-Flavor': 'Google'})
# r.raise_for_status()
# access_token = r.json()['access_token']
# return access_token
#
# client = kfp.Client(host='<KFPHost>', existing_token=get_access_token())
#
if existing_token:
token = existing_token
elif client_id:
token = get_auth_token(client_id, other_client_id, other_client_secret)
elif self._is_inverse_proxy_host(host):
token = get_gcp_access_token()
if token:
config.api_key["authorization"] = token
config.api_key_prefix["authorization"] = "Bearer"
return config
if host:
# if host is explicitly set with auth token, it's probably a port forward address.
return config
import kubernetes as k8s
in_cluster = True
try:
k8s.config.load_incluster_config()
except:
in_cluster = False
pass
if in_cluster:
config.host = Client.IN_CLUSTER_DNS_NAME.format(namespace)
return config
try:
k8s.config.load_kube_config(client_configuration=config)
except:
print("Failed to load kube config.")
return config
if config.host:
config.host = config.host + "/" + Client.KUBE_PROXY_PATH.format(namespace)
return config
|
def _load_config(
self,
host,
client_id,
namespace,
other_client_id,
other_client_secret,
existing_token,
):
config = kfp_server_api.configuration.Configuration()
host = host or ""
# Preprocess the host endpoint to prevent some common user mistakes.
# This should only be done for non-IAP cases (when client_id is None). IAP requires preserving the protocol.
if not client_id:
host = re.sub(r"^(http|https)://", "", host).rstrip("/")
if host:
config.host = host
token = None
# "existing_token" is designed to accept token generated outside of SDK. Here is an example.
#
# https://cloud.google.com/functions/docs/securing/function-identity
# https://cloud.google.com/endpoints/docs/grpc/service-account-authentication
#
# import requests
# import kfp
#
# def get_access_token():
# url = 'http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/token'
# r = requests.get(url, headers={'Metadata-Flavor': 'Google'})
# r.raise_for_status()
# access_token = r.json()['access_token']
# return access_token
#
# client = kfp.Client(host='<KFPHost>', existing_token=get_access_token())
#
if existing_token:
token = existing_token
elif client_id:
token = get_auth_token(client_id, other_client_id, other_client_secret)
elif self._is_inverse_proxy_host(host):
token = get_gcp_access_token()
if token:
config.api_key["authorization"] = token
config.api_key_prefix["authorization"] = "Bearer"
return config
if host:
# if host is explicitly set with auth token, it's probably a port forward address.
return config
import kubernetes as k8s
in_cluster = True
try:
k8s.config.load_incluster_config()
except:
in_cluster = False
pass
if in_cluster:
config.host = Client.IN_CLUSTER_DNS_NAME.format(namespace)
return config
try:
k8s.config.load_kube_config(client_configuration=config)
except:
print("Failed to load kube config.")
return config
if config.host:
config.host = config.host + "/" + Client.KUBE_PROXY_PATH.format(namespace)
return config
|
https://github.com/kubeflow/pipelines/issues/4277
|
Traceback (most recent call last):
File "../kubeflow-apply/kubeflow-apply.py", line 73, in <module>
args.func()
File "../kubeflow-apply/kubeflow-apply.py", line 33, in pipelines
self.client.upload_pipeline(filename, name, description)
File "/usr/local/lib/python3.8/site-packages/kfp/_client.py", line 723, in upload_pipeline
response = self._upload_api.upload_pipeline(pipeline_package_path, name=pipeline_name, description=description)
File "/usr/local/lib/python3.8/site-packages/kfp_server_api/api/pipeline_upload_service_api.py", line 83, in upload_pipeline
return self.upload_pipeline_with_http_info(uploadfile, **kwargs) # noqa: E501
File "/usr/local/lib/python3.8/site-packages/kfp_server_api/api/pipeline_upload_service_api.py", line 177, in upload_pipeline_with_http_info
return self.api_client.call_api(
File "/usr/local/lib/python3.8/site-packages/kfp_server_api/api_client.py", line 379, in call_api
return self.__call_api(resource_path, method,
File "/usr/local/lib/python3.8/site-packages/kfp_server_api/api_client.py", line 196, in __call_api
response_data = self.request(
File "/usr/local/lib/python3.8/site-packages/kfp_server_api/api_client.py", line 422, in request
return self.rest_client.POST(url,
File "/usr/local/lib/python3.8/site-packages/kfp_server_api/rest.py", line 279, in POST
return self.request("POST", url,
File "/usr/local/lib/python3.8/site-packages/kfp_server_api/rest.py", line 196, in request
r = self.pool_manager.request(
File "/usr/local/lib/python3.8/site-packages/urllib3/request.py", line 79, in request
return self.request_encode_body(
File "/usr/local/lib/python3.8/site-packages/urllib3/request.py", line 172, in request_encode_body
return self.urlopen(method, url, **extra_kw)
File "/usr/local/lib/python3.8/site-packages/urllib3/poolmanager.py", line 326, in urlopen
conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
File "/usr/local/lib/python3.8/site-packages/urllib3/poolmanager.py", line 231, in connection_from_host
raise LocationValueError("No host specified.")
urllib3.exceptions.LocationValueError: No host specified.
|
urllib3.exceptions.LocationValueError
|
def create_run_from_pipeline_func(
self,
pipeline_func: Callable,
arguments: Mapping[str, str],
run_name=None,
experiment_name=None,
pipeline_conf: kfp.dsl.PipelineConf = None,
namespace=None,
):
"""Runs pipeline on KFP-enabled Kubernetes cluster.
This command compiles the pipeline function, creates or gets an experiment and submits the pipeline for execution.
Args:
pipeline_func: A function that describes a pipeline by calling components and composing them into execution graph.
arguments: Arguments to the pipeline function provided as a dict.
run_name: Optional. Name of the run to be shown in the UI.
experiment_name: Optional. Name of the experiment to add the run to.
namespace: kubernetes namespace where the pipeline runs are created.
For single user deployment, leave it as None;
For multi user, input a namespace where the user is authorized
"""
# TODO: Check arguments against the pipeline function
pipeline_name = pipeline_func.__name__
run_name = run_name or pipeline_name + " " + datetime.datetime.now().strftime(
"%Y-%m-%d %H-%M-%S"
)
with tempfile.TemporaryDirectory() as tmpdir:
pipeline_package_path = os.path.join(tmpdir, "pipeline.yaml")
compiler.Compiler().compile(
pipeline_func, pipeline_package_path, pipeline_conf=pipeline_conf
)
return self.create_run_from_pipeline_package(
pipeline_package_path, arguments, run_name, experiment_name, namespace
)
|
def create_run_from_pipeline_func(
self,
pipeline_func: Callable,
arguments: Mapping[str, str],
run_name=None,
experiment_name=None,
pipeline_conf: kfp.dsl.PipelineConf = None,
namespace=None,
):
"""Runs pipeline on KFP-enabled Kubernetes cluster.
This command compiles the pipeline function, creates or gets an experiment and submits the pipeline for execution.
Args:
pipeline_func: A function that describes a pipeline by calling components and composing them into execution graph.
arguments: Arguments to the pipeline function provided as a dict.
run_name: Optional. Name of the run to be shown in the UI.
experiment_name: Optional. Name of the experiment to add the run to.
namespace: kubernetes namespace where the pipeline runs are created.
For single user deployment, leave it as None;
For multi user, input a namespace where the user is authorized
"""
# TODO: Check arguments against the pipeline function
pipeline_name = pipeline_func.__name__
run_name = run_name or pipeline_name + " " + datetime.now().strftime(
"%Y-%m-%d %H-%M-%S"
)
with tempfile.TemporaryDirectory() as tmpdir:
pipeline_package_path = os.path.join(tmpdir, "pipeline.yaml")
compiler.Compiler().compile(
pipeline_func, pipeline_package_path, pipeline_conf=pipeline_conf
)
return self.create_run_from_pipeline_package(
pipeline_package_path, arguments, run_name, experiment_name, namespace
)
|
https://github.com/kubeflow/pipelines/issues/3630
|
AttributeError
Traceback (most recent call last)
<ipython-input-8-8ebf2be6196c> in <module>
1 client = kfp.Client(host='2c688415e5fe965e-dot-us-central2.pipelines.googleusercontent.com')
----> 2 client.wait_for_run_completion(run_id=run_result.run_id, timeout=36000)
~/Library/Python/3.7/lib/python/site-packages/kfp/_client.py in wait_for_run_completion(self, run_id, timeout)
597 while status is None or status.lower() not in ['succeeded', 'failed', 'skipped', 'error']:
598 get_run_response = self._run_api.get_run(run_id=run_id)
--> 599 status = get_run_response.run.status
600 elapsed_time = (datetime.now() - start_time).seconds
601 logging.info('Waiting for the job to complete...')
AttributeError: 'NoneType' object has no attribute 'status'
|
AttributeError
|
def create_run_from_pipeline_package(
self,
pipeline_file: str,
arguments: Mapping[str, str],
run_name=None,
experiment_name=None,
namespace=None,
):
"""Runs pipeline on KFP-enabled Kubernetes cluster.
This command compiles the pipeline function, creates or gets an experiment and submits the pipeline for execution.
Args:
pipeline_file: A compiled pipeline package file.
arguments: Arguments to the pipeline function provided as a dict.
run_name: Optional. Name of the run to be shown in the UI.
experiment_name: Optional. Name of the experiment to add the run to.
namespace: kubernetes namespace where the pipeline runs are created.
For single user deployment, leave it as None;
For multi user, input a namespace where the user is authorized
"""
class RunPipelineResult:
def __init__(self, client, run_info):
self._client = client
self.run_info = run_info
self.run_id = run_info.id
def wait_for_run_completion(self, timeout=None):
timeout = timeout or datetime.timedelta.max
return self._client.wait_for_run_completion(self.run_id, timeout)
def __repr__(self):
return "RunPipelineResult(run_id={})".format(self.run_id)
# TODO: Check arguments against the pipeline function
pipeline_name = os.path.basename(pipeline_file)
experiment_name = experiment_name or os.environ.get(
KF_PIPELINES_DEFAULT_EXPERIMENT_NAME, None
)
overridden_experiment_name = os.environ.get(
KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME, experiment_name
)
if overridden_experiment_name != experiment_name:
import warnings
warnings.warn(
'Changing experiment name from "{}" to "{}".'.format(
experiment_name, overridden_experiment_name
)
)
experiment_name = overridden_experiment_name or "Default"
run_name = run_name or (
pipeline_name + " " + datetime.datetime.now().strftime("%Y-%m-%d %H-%M-%S")
)
experiment = self.create_experiment(name=experiment_name, namespace=namespace)
run_info = self.run_pipeline(experiment.id, run_name, pipeline_file, arguments)
return RunPipelineResult(self, run_info)
|
def create_run_from_pipeline_package(
self,
pipeline_file: str,
arguments: Mapping[str, str],
run_name=None,
experiment_name=None,
namespace=None,
):
"""Runs pipeline on KFP-enabled Kubernetes cluster.
This command compiles the pipeline function, creates or gets an experiment and submits the pipeline for execution.
Args:
pipeline_file: A compiled pipeline package file.
arguments: Arguments to the pipeline function provided as a dict.
run_name: Optional. Name of the run to be shown in the UI.
experiment_name: Optional. Name of the experiment to add the run to.
namespace: kubernetes namespace where the pipeline runs are created.
For single user deployment, leave it as None;
For multi user, input a namespace where the user is authorized
"""
class RunPipelineResult:
def __init__(self, client, run_info):
self._client = client
self.run_info = run_info
self.run_id = run_info.id
def wait_for_run_completion(self, timeout=None):
timeout = timeout or datetime.datetime.max - datetime.datetime.min
return self._client.wait_for_run_completion(self.run_id, timeout)
def __repr__(self):
return "RunPipelineResult(run_id={})".format(self.run_id)
# TODO: Check arguments against the pipeline function
pipeline_name = os.path.basename(pipeline_file)
experiment_name = experiment_name or os.environ.get(
KF_PIPELINES_DEFAULT_EXPERIMENT_NAME, None
)
overridden_experiment_name = os.environ.get(
KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME, experiment_name
)
if overridden_experiment_name != experiment_name:
import warnings
warnings.warn(
'Changing experiment name from "{}" to "{}".'.format(
experiment_name, overridden_experiment_name
)
)
experiment_name = overridden_experiment_name or "Default"
run_name = run_name or pipeline_name + " " + datetime.now().strftime(
"%Y-%m-%d %H-%M-%S"
)
experiment = self.create_experiment(name=experiment_name, namespace=namespace)
run_info = self.run_pipeline(experiment.id, run_name, pipeline_file, arguments)
return RunPipelineResult(self, run_info)
|
https://github.com/kubeflow/pipelines/issues/3630
|
AttributeError
Traceback (most recent call last)
<ipython-input-8-8ebf2be6196c> in <module>
1 client = kfp.Client(host='2c688415e5fe965e-dot-us-central2.pipelines.googleusercontent.com')
----> 2 client.wait_for_run_completion(run_id=run_result.run_id, timeout=36000)
~/Library/Python/3.7/lib/python/site-packages/kfp/_client.py in wait_for_run_completion(self, run_id, timeout)
597 while status is None or status.lower() not in ['succeeded', 'failed', 'skipped', 'error']:
598 get_run_response = self._run_api.get_run(run_id=run_id)
--> 599 status = get_run_response.run.status
600 elapsed_time = (datetime.now() - start_time).seconds
601 logging.info('Waiting for the job to complete...')
AttributeError: 'NoneType' object has no attribute 'status'
|
AttributeError
|
def wait_for_run_completion(self, timeout=None):
timeout = timeout or datetime.timedelta.max
return self._client.wait_for_run_completion(self.run_id, timeout)
|
def wait_for_run_completion(self, timeout=None):
timeout = timeout or datetime.datetime.max - datetime.datetime.min
return self._client.wait_for_run_completion(self.run_id, timeout)
|
https://github.com/kubeflow/pipelines/issues/3630
|
AttributeError
Traceback (most recent call last)
<ipython-input-8-8ebf2be6196c> in <module>
1 client = kfp.Client(host='2c688415e5fe965e-dot-us-central2.pipelines.googleusercontent.com')
----> 2 client.wait_for_run_completion(run_id=run_result.run_id, timeout=36000)
~/Library/Python/3.7/lib/python/site-packages/kfp/_client.py in wait_for_run_completion(self, run_id, timeout)
597 while status is None or status.lower() not in ['succeeded', 'failed', 'skipped', 'error']:
598 get_run_response = self._run_api.get_run(run_id=run_id)
--> 599 status = get_run_response.run.status
600 elapsed_time = (datetime.now() - start_time).seconds
601 logging.info('Waiting for the job to complete...')
AttributeError: 'NoneType' object has no attribute 'status'
|
AttributeError
|
def create_artifact_with_type(
store,
uri: str,
type_name: str,
properties: dict = None,
type_properties: dict = None,
custom_properties: dict = None,
) -> metadata_store_pb2.Artifact:
artifact_type = get_or_create_artifact_type(
store=store,
type_name=type_name,
properties=type_properties,
)
artifact = metadata_store_pb2.Artifact(
uri=uri,
type_id=artifact_type.id,
properties=properties,
custom_properties=custom_properties,
)
artifact.id = store.put_artifacts([artifact])[0]
return artifact
|
def create_artifact_with_type(
store,
uri: str,
type_name: str,
properties: dict = None,
type_properties: dict = None,
) -> metadata_store_pb2.Artifact:
artifact_type = get_or_create_artifact_type(
store=store,
type_name=type_name,
properties=type_properties,
)
artifact = metadata_store_pb2.Artifact(
uri=uri,
type_id=artifact_type.id,
properties=properties,
)
artifact.id = store.put_artifacts([artifact])[0]
return artifact
|
https://github.com/kubeflow/pipelines/issues/3552
|
Kubernetes Pod event: MODIFIED file-passing-pipelines-mbzrz-3001099438 1555702
Traceback (most recent call last):
File "/kfp/metadata_writer/metadata_writer.py", line 183, in <module>
KFP_POD_NAME_EXECUTION_PROPERTY_NAME: pod_name,
File "/kfp/metadata_writer/metadata_helpers.py", line 280, in create_new_execution_in_existing_run_context
custom_properties=custom_properties,
TypeError: create_new_execution_in_existing_context() got an unexpected keyword argument 'custom_properties'
|
TypeError
|
def create_execution_with_type(
store,
type_name: str,
properties: dict = None,
type_properties: dict = None,
custom_properties: dict = None,
) -> metadata_store_pb2.Execution:
execution_type = get_or_create_execution_type(
store=store,
type_name=type_name,
properties=type_properties,
)
execution = metadata_store_pb2.Execution(
type_id=execution_type.id,
properties=properties,
custom_properties=custom_properties,
)
execution.id = store.put_executions([execution])[0]
return execution
|
def create_execution_with_type(
store,
type_name: str,
properties: dict = None,
type_properties: dict = None,
) -> metadata_store_pb2.Execution:
execution_type = get_or_create_execution_type(
store=store,
type_name=type_name,
properties=type_properties,
)
execution = metadata_store_pb2.Execution(
type_id=execution_type.id,
properties=properties,
)
execution.id = store.put_executions([execution])[0]
return execution
|
https://github.com/kubeflow/pipelines/issues/3552
|
Kubernetes Pod event: MODIFIED file-passing-pipelines-mbzrz-3001099438 1555702
Traceback (most recent call last):
File "/kfp/metadata_writer/metadata_writer.py", line 183, in <module>
KFP_POD_NAME_EXECUTION_PROPERTY_NAME: pod_name,
File "/kfp/metadata_writer/metadata_helpers.py", line 280, in create_new_execution_in_existing_run_context
custom_properties=custom_properties,
TypeError: create_new_execution_in_existing_context() got an unexpected keyword argument 'custom_properties'
|
TypeError
|
def create_context_with_type(
store,
context_name: str,
type_name: str,
properties: dict = None,
type_properties: dict = None,
custom_properties: dict = None,
) -> metadata_store_pb2.Context:
# ! Context_name must be unique
context_type = get_or_create_context_type(
store=store,
type_name=type_name,
properties=type_properties,
)
context = metadata_store_pb2.Context(
name=context_name,
type_id=context_type.id,
properties=properties,
custom_properties=custom_properties,
)
context.id = store.put_contexts([context])[0]
return context
|
def create_context_with_type(
store,
context_name: str,
type_name: str,
properties: dict = None,
type_properties: dict = None,
) -> metadata_store_pb2.Context:
# ! Context_name must be unique
context_type = get_or_create_context_type(
store=store,
type_name=type_name,
properties=type_properties,
)
context = metadata_store_pb2.Context(
name=context_name,
type_id=context_type.id,
properties=properties,
)
context.id = store.put_contexts([context])[0]
return context
|
https://github.com/kubeflow/pipelines/issues/3552
|
Kubernetes Pod event: MODIFIED file-passing-pipelines-mbzrz-3001099438 1555702
Traceback (most recent call last):
File "/kfp/metadata_writer/metadata_writer.py", line 183, in <module>
KFP_POD_NAME_EXECUTION_PROPERTY_NAME: pod_name,
File "/kfp/metadata_writer/metadata_helpers.py", line 280, in create_new_execution_in_existing_run_context
custom_properties=custom_properties,
TypeError: create_new_execution_in_existing_context() got an unexpected keyword argument 'custom_properties'
|
TypeError
|
def get_or_create_context_with_type(
store,
context_name: str,
type_name: str,
properties: dict = None,
type_properties: dict = None,
custom_properties: dict = None,
) -> metadata_store_pb2.Context:
try:
context = get_context_by_name(store, context_name)
except:
context = create_context_with_type(
store=store,
context_name=context_name,
type_name=type_name,
properties=properties,
type_properties=type_properties,
custom_properties=custom_properties,
)
return context
# Verifying that the context has the expected type name
context_types = store.get_context_types_by_id([context.type_id])
assert len(context_types) == 1
if context_types[0].name != type_name:
raise RuntimeError(
'Context "{}" was found, but it has type "{}" instead of "{}"'.format(
context_name, context_types[0].name, type_name
)
)
return context
|
def get_or_create_context_with_type(
store,
context_name: str,
type_name: str,
properties: dict = None,
type_properties: dict = None,
) -> metadata_store_pb2.Context:
try:
context = get_context_by_name(store, context_name)
except:
context = create_context_with_type(
store=store,
context_name=context_name,
type_name=type_name,
properties=properties,
type_properties=type_properties,
)
return context
# Verifying that the context has the expected type name
context_types = store.get_context_types_by_id([context.type_id])
assert len(context_types) == 1
if context_types[0].name != type_name:
raise RuntimeError(
'Context "{}" was found, but it has type "{}" instead of "{}"'.format(
context_name, context_types[0].name, type_name
)
)
return context
|
https://github.com/kubeflow/pipelines/issues/3552
|
Kubernetes Pod event: MODIFIED file-passing-pipelines-mbzrz-3001099438 1555702
Traceback (most recent call last):
File "/kfp/metadata_writer/metadata_writer.py", line 183, in <module>
KFP_POD_NAME_EXECUTION_PROPERTY_NAME: pod_name,
File "/kfp/metadata_writer/metadata_helpers.py", line 280, in create_new_execution_in_existing_run_context
custom_properties=custom_properties,
TypeError: create_new_execution_in_existing_context() got an unexpected keyword argument 'custom_properties'
|
TypeError
|
def create_new_execution_in_existing_context(
store,
execution_type_name: str,
context_id: int,
properties: dict = None,
execution_type_properties: dict = None,
custom_properties: dict = None,
) -> metadata_store_pb2.Execution:
execution = create_execution_with_type(
store=store,
properties=properties,
custom_properties=custom_properties,
type_name=execution_type_name,
type_properties=execution_type_properties,
)
association = metadata_store_pb2.Association(
execution_id=execution.id,
context_id=context_id,
)
store.put_attributions_and_associations([], [association])
return execution
|
def create_new_execution_in_existing_context(
store,
execution_type_name: str,
context_id: int,
properties: dict = None,
execution_type_properties: dict = None,
) -> metadata_store_pb2.Execution:
execution = create_execution_with_type(
store=store,
properties=properties,
type_name=execution_type_name,
type_properties=execution_type_properties,
)
association = metadata_store_pb2.Association(
execution_id=execution.id,
context_id=context_id,
)
store.put_attributions_and_associations([], [association])
return execution
|
https://github.com/kubeflow/pipelines/issues/3552
|
Kubernetes Pod event: MODIFIED file-passing-pipelines-mbzrz-3001099438 1555702
Traceback (most recent call last):
File "/kfp/metadata_writer/metadata_writer.py", line 183, in <module>
KFP_POD_NAME_EXECUTION_PROPERTY_NAME: pod_name,
File "/kfp/metadata_writer/metadata_helpers.py", line 280, in create_new_execution_in_existing_run_context
custom_properties=custom_properties,
TypeError: create_new_execution_in_existing_context() got an unexpected keyword argument 'custom_properties'
|
TypeError
|
def create_new_artifact_event_and_attribution(
store,
execution_id: int,
context_id: int,
uri: str,
type_name: str,
event_type: metadata_store_pb2.Event.Type,
properties: dict = None,
artifact_type_properties: dict = None,
custom_properties: dict = None,
artifact_name_path: metadata_store_pb2.Event.Path = None,
milliseconds_since_epoch: int = None,
) -> metadata_store_pb2.Artifact:
artifact = create_artifact_with_type(
store=store,
uri=uri,
type_name=type_name,
type_properties=artifact_type_properties,
properties=properties,
custom_properties=custom_properties,
)
event = metadata_store_pb2.Event(
execution_id=execution_id,
artifact_id=artifact.id,
type=event_type,
path=artifact_name_path,
milliseconds_since_epoch=milliseconds_since_epoch,
)
store.put_events([event])
attribution = metadata_store_pb2.Attribution(
context_id=context_id,
artifact_id=artifact.id,
)
store.put_attributions_and_associations([attribution], [])
return artifact
|
def create_new_artifact_event_and_attribution(
store,
execution_id: int,
context_id: int,
uri: str,
type_name: str,
event_type: metadata_store_pb2.Event.Type,
properties: dict = None,
artifact_type_properties: dict = None,
artifact_name_path: metadata_store_pb2.Event.Path = None,
milliseconds_since_epoch: int = None,
) -> metadata_store_pb2.Artifact:
artifact = create_artifact_with_type(
store=store,
uri=uri,
type_name=type_name,
type_properties=artifact_type_properties,
properties=properties,
)
event = metadata_store_pb2.Event(
execution_id=execution_id,
artifact_id=artifact.id,
type=event_type,
path=artifact_name_path,
milliseconds_since_epoch=milliseconds_since_epoch,
)
store.put_events([event])
attribution = metadata_store_pb2.Attribution(
context_id=context_id,
artifact_id=artifact.id,
)
store.put_attributions_and_associations([attribution], [])
return artifact
|
https://github.com/kubeflow/pipelines/issues/3552
|
Kubernetes Pod event: MODIFIED file-passing-pipelines-mbzrz-3001099438 1555702
Traceback (most recent call last):
File "/kfp/metadata_writer/metadata_writer.py", line 183, in <module>
KFP_POD_NAME_EXECUTION_PROPERTY_NAME: pod_name,
File "/kfp/metadata_writer/metadata_helpers.py", line 280, in create_new_execution_in_existing_run_context
custom_properties=custom_properties,
TypeError: create_new_execution_in_existing_context() got an unexpected keyword argument 'custom_properties'
|
TypeError
|
def create_new_execution_in_existing_run_context(
store,
execution_type_name: str,
context_id: int,
pod_name: str,
# TODO: Remove when UX stops relying on thsese properties
pipeline_name: str = None,
run_id: str = None,
instance_id: str = None,
custom_properties=None,
) -> metadata_store_pb2.Execution:
pipeline_name = pipeline_name or "Context_" + str(context_id) + "_pipeline"
run_id = run_id or "Context_" + str(context_id) + "_run"
instance_id = instance_id or execution_type_name
return create_new_execution_in_existing_context(
store=store,
execution_type_name=execution_type_name,
context_id=context_id,
execution_type_properties={
EXECUTION_PIPELINE_NAME_PROPERTY_NAME: metadata_store_pb2.STRING,
EXECUTION_RUN_ID_PROPERTY_NAME: metadata_store_pb2.STRING,
EXECUTION_COMPONENT_ID_PROPERTY_NAME: metadata_store_pb2.STRING,
},
# TODO: Remove when UX stops relying on thsese properties
properties={
EXECUTION_PIPELINE_NAME_PROPERTY_NAME: metadata_store_pb2.Value(
string_value=pipeline_name
), # Mistakenly used for grouping in the UX
EXECUTION_RUN_ID_PROPERTY_NAME: metadata_store_pb2.Value(
string_value=run_id
),
EXECUTION_COMPONENT_ID_PROPERTY_NAME: metadata_store_pb2.Value(
string_value=instance_id
), # should set to task ID, not component ID
},
custom_properties={
KFP_POD_NAME_EXECUTION_PROPERTY_NAME: metadata_store_pb2.Value(
string_value=pod_name
),
},
)
|
def create_new_execution_in_existing_run_context(
store,
execution_type_name: str,
context_id: int,
# TODO: Remove when UX stops relying on thsese properties
pipeline_name: str = None,
run_id: str = None,
instance_id: str = None,
custom_properties=None,
) -> metadata_store_pb2.Execution:
pipeline_name = pipeline_name or "Context_" + str(context_id) + "_pipeline"
run_id = run_id or "Context_" + str(context_id) + "_run"
instance_id = instance_id or execution_type_name
return create_new_execution_in_existing_context(
store=store,
execution_type_name=execution_type_name,
context_id=context_id,
execution_type_properties={
EXECUTION_PIPELINE_NAME_PROPERTY_NAME: metadata_store_pb2.STRING,
EXECUTION_RUN_ID_PROPERTY_NAME: metadata_store_pb2.STRING,
EXECUTION_COMPONENT_ID_PROPERTY_NAME: metadata_store_pb2.STRING,
},
# TODO: Remove when UX stops relying on thsese properties
properties={
EXECUTION_PIPELINE_NAME_PROPERTY_NAME: metadata_store_pb2.Value(
string_value=pipeline_name
), # Mistakenly used for grouping in the UX
EXECUTION_RUN_ID_PROPERTY_NAME: metadata_store_pb2.Value(
string_value=run_id
),
EXECUTION_COMPONENT_ID_PROPERTY_NAME: metadata_store_pb2.Value(
string_value=instance_id
), # should set to task ID, not component ID
},
custom_properties=custom_properties,
)
|
https://github.com/kubeflow/pipelines/issues/3552
|
Kubernetes Pod event: MODIFIED file-passing-pipelines-mbzrz-3001099438 1555702
Traceback (most recent call last):
File "/kfp/metadata_writer/metadata_writer.py", line 183, in <module>
KFP_POD_NAME_EXECUTION_PROPERTY_NAME: pod_name,
File "/kfp/metadata_writer/metadata_helpers.py", line 280, in create_new_execution_in_existing_run_context
custom_properties=custom_properties,
TypeError: create_new_execution_in_existing_context() got an unexpected keyword argument 'custom_properties'
|
TypeError
|
def resnet_train(
project_id,
output,
region="us-central1",
model="bolts",
version="beta1",
tf_version="1.12",
train_csv="gs://bolts_image_dataset/bolt_images_train.csv",
validation_csv="gs://bolts_image_dataset/bolt_images_validate.csv",
labels="gs://bolts_image_dataset/labels.txt",
depth=50,
train_batch_size=1024,
eval_batch_size=1024,
steps_per_eval=250,
train_steps=10000,
num_train_images=218593,
num_eval_images=54648,
num_label_classes=10,
):
output_dir = os.path.join(str(output), "{{workflow.name}}")
preprocess_staging = os.path.join(output_dir, "staging")
preprocess_output = os.path.join(output_dir, "preprocessed_output")
train_output = os.path.join(output_dir, "model")
preprocess = resnet_preprocess_op(
project_id,
preprocess_output,
preprocess_staging,
train_csv,
validation_csv,
labels,
train_batch_size,
eval_batch_size,
).apply(gcp.use_gcp_secret())
train = resnet_train_op(
project_id,
preprocess_output,
train_output,
region,
depth,
train_batch_size,
eval_batch_size,
steps_per_eval,
train_steps,
num_train_images,
num_eval_images,
num_label_classes,
tf_version,
).apply(gcp.use_gcp_secret())
train.after(preprocess)
export_output = os.path.join(str(train.outputs["job_dir"]), "export")
deploy = resnet_deploy_op(
export_output, model, version, project_id, region, tf_version
).apply(gcp.use_gcp_secret())
|
def resnet_train(
project_id,
output,
region="us-central1",
model="bolts",
version="beta1",
tf_version="1.12",
train_csv="gs://bolts_image_dataset/bolt_images_train.csv",
validation_csv="gs://bolts_image_dataset/bolt_images_validate.csv",
labels="gs://bolts_image_dataset/labels.txt",
depth=50,
train_batch_size=1024,
eval_batch_size=1024,
steps_per_eval=250,
train_steps=10000,
num_train_images=218593,
num_eval_images=54648,
num_label_classes=10,
):
output_dir = os.path.join(str(output), "{{workflow.name}}")
preprocess_staging = os.path.join(output_dir, "staging")
preprocess_output = os.path.join(output_dir, "preprocessed_output")
train_output = os.path.join(output_dir, "model")
preprocess = resnet_preprocess_op(
project_id,
preprocess_output,
preprocess_staging,
train_csv,
validation_csv,
labels,
train_batch_size,
eval_batch_size,
).apply(gcp.use_gcp_secret())
train = resnet_train_op(
project_id,
preprocess_output,
train_output,
region,
depth,
train_batch_size,
eval_batch_size,
steps_per_eval,
train_steps,
num_train_images,
num_eval_images,
num_label_classes,
tf_version,
).apply(gcp.use_gcp_secret())
train.after(preprocess)
export_output = os.path.join(str(train.outputs["job-dir"]), "export")
deploy = resnet_deploy_op(
export_output, model, version, project_id, region, tf_version
).apply(gcp.use_gcp_secret())
|
https://github.com/kubeflow/pipelines/issues/1142
|
Step 20/31 : RUN find . -maxdepth 2 -name '*.py' -type f | while read pipeline; do dsl-compile --py "$pipeline" --output "$pipeline.tar.gz"; done
---> Running in 3205bef0c5cb
Traceback (most recent call last):
File "/usr/local/bin/dsl-compile", line 10, in <module>
sys.exit(main())
File "/usr/local/lib/python3.5/site-packages/kfp/compiler/main.py", line 103, in main
compile_pyfile(args.py, args.function, args.output, not args.disable_type_check)
File "/usr/local/lib/python3.5/site-packages/kfp/compiler/main.py", line 92, in compile_pyfile
_compile_pipeline_function(function_name, output_path, type_check)
File "/usr/local/lib/python3.5/site-packages/kfp/compiler/main.py", line 72, in _compile_pipeline_function
kfp.compiler.Compiler().compile(pipeline_func, output_path, type_check)
File "/usr/local/lib/python3.5/site-packages/kfp/compiler/compiler.py", line 644, in compile
workflow = self._compile(pipeline_func)
File "/usr/local/lib/python3.5/site-packages/kfp/compiler/compiler.py", line 596, in _compile
pipeline_func(*args_list)
File "./resnet-cmle/resnet-train-pipeline.py", line 127, in resnet_train
export_output = os.path.join(str(train.outputs['job-dir']), 'export')
KeyError: 'job-dir'
The command '/bin/sh -c find . -maxdepth 2 -name '*.py' -type f | while read pipeline; do dsl-compile --py "$pipeline" --output "$pipeline.tar.gz"; done' returned a non-zero code: 1
|
KeyError
|
def getFeed(urls):
feedPosts = []
with FuturesSession() as session:
futures = [
session.get(
"{instance}{user}".format(instance=NITTERINSTANCE, user=u.username)
)
for u in urls
]
for future in as_completed(futures):
res = future.result().content.decode("utf-8")
html = BeautifulSoup(res, "html.parser")
userFeed = html.find_all("div", attrs={"class": "timeline-item"})
if userFeed != []:
for post in userFeed[:-1]:
date_time_str = (
post.find("span", attrs={"class": "tweet-date"})
.find("a")["title"]
.replace(",", "")
)
time = datetime.datetime.now() - datetime.datetime.strptime(
date_time_str, "%d/%m/%Y %H:%M:%S"
)
if time.days >= 7:
continue
if post.find("div", attrs={"class": "pinned"}):
if post.find("div", attrs={"class": "pinned"}).find(
"span", attrs={"icon-pin"}
):
continue
newPost = twitterPost()
newPost.op = post.find("a", attrs={"class": "username"}).text
newPost.twitterName = post.find(
"a", attrs={"class": "fullname"}
).text
newPost.timeStamp = datetime.datetime.strptime(
date_time_str, "%d/%m/%Y %H:%M:%S"
)
newPost.date = (
post.find("span", attrs={"class": "tweet-date"}).find("a").text
)
newPost.content = Markup(
post.find("div", attrs={"class": "tweet-content"})
)
if post.find("div", attrs={"class": "retweet-header"}):
newPost.username = (
post.find("div", attrs={"class": "retweet-header"})
.find("div", attrs={"class": "icon-container"})
.text
)
newPost.isRT = True
else:
newPost.username = newPost.op
newPost.isRT = False
newPost.profilePic = (
NITTERINSTANCE
+ post.find("a", attrs={"class": "tweet-avatar"}).find("img")[
"src"
][1:]
)
newPost.url = (
NITTERINSTANCE
+ post.find("a", attrs={"class": "tweet-link"})["href"][1:]
)
if post.find("div", attrs={"class": "quote"}):
newPost.isReply = True
quote = post.find("div", attrs={"class": "quote"})
if quote.find("div", attrs={"class": "quote-text"}):
newPost.replyingTweetContent = Markup(
quote.find("div", attrs={"class": "quote-text"})
)
if quote.find("a", attrs={"class": "still-image"}):
newPost.replyAttachedImg = (
NITTERINSTANCE
+ quote.find("a", attrs={"class": "still-image"})[
"href"
][1:]
)
if quote.find("div", attrs={"class": "unavailable-quote"}):
newPost.replyingUser = "Unavailable"
else:
newPost.replyingUser = quote.find(
"a", attrs={"class": "username"}
).text
post.find("div", attrs={"class": "quote"}).decompose()
if post.find("div", attrs={"class": "attachments"}):
if not post.find(class_="quote"):
if post.find("div", attrs={"class": "attachments"}).find(
"a", attrs={"class": "still-image"}
):
newPost.attachedImg = (
NITTERINSTANCE
+ post.find(
"div", attrs={"class": "attachments"}
).find("a")["href"][1:]
)
feedPosts.append(newPost)
return feedPosts
|
def getFeed(urls):
feedPosts = []
with FuturesSession() as session:
futures = [
session.get(
"{instance}{user}".format(instance=NITTERINSTANCE, user=u.username)
)
for u in urls
]
for future in as_completed(futures):
res = future.result().content.decode("utf-8")
html = BeautifulSoup(res, "html.parser")
userFeed = html.find_all("div", attrs={"class": "timeline-item"})
if userFeed != []:
for post in userFeed[:-1]:
date_time_str = (
post.find("span", attrs={"class": "tweet-date"})
.find("a")["title"]
.replace(",", "")
)
time = datetime.datetime.now() - datetime.datetime.strptime(
date_time_str, "%d/%m/%Y %H:%M:%S"
)
if time.days >= 7:
continue
if post.find("div", attrs={"class": "pinned"}):
if post.find("div", attrs={"class": "pinned"}).find(
"span", attrs={"icon-pin"}
):
continue
newPost = twitterPost()
newPost.op = post.find("a", attrs={"class": "username"}).text
newPost.twitterName = post.find(
"a", attrs={"class": "fullname"}
).text
newPost.timeStamp = datetime.datetime.strptime(
date_time_str, "%d/%m/%Y %H:%M:%S"
)
newPost.date = (
post.find("span", attrs={"class": "tweet-date"}).find("a").text
)
newPost.content = Markup(
post.find("div", attrs={"class": "tweet-content"})
)
if post.find("div", attrs={"class": "retweet-header"}):
newPost.username = (
post.find("div", attrs={"class": "retweet-header"})
.find("div", attrs={"class": "icon-container"})
.text
)
newPost.isRT = True
else:
newPost.username = newPost.op
newPost.isRT = False
newPost.profilePic = (
NITTERINSTANCE
+ post.find("a", attrs={"class": "tweet-avatar"}).find("img")[
"src"
][1:]
)
newPost.url = (
NITTERINSTANCE
+ post.find("a", attrs={"class": "tweet-link"})["href"][1:]
)
if post.find("div", attrs={"class": "quote"}):
newPost.isReply = True
quote = post.find("div", attrs={"class": "quote"})
if quote.find("div", attrs={"class": "quote-text"}):
newPost.replyingTweetContent = Markup(
quote.find("div", attrs={"class": "quote-text"})
)
if quote.find("a", attrs={"class": "still-image"}):
newPost.replyAttachedImg = (
NITTERINSTANCE
+ quote.find("a", attrs={"class": "still-image"})[
"href"
][1:]
)
newPost.replyingUser = quote.find(
"a", attrs={"class": "username"}
).text
post.find("div", attrs={"class": "quote"}).decompose()
if post.find("div", attrs={"class": "attachments"}):
if not post.find(class_="quote"):
if post.find("div", attrs={"class": "attachments"}).find(
"a", attrs={"class": "still-image"}
):
newPost.attachedImg = (
NITTERINSTANCE
+ post.find(
"div", attrs={"class": "attachments"}
).find("a")["href"][1:]
)
feedPosts.append(newPost)
return feedPosts
|
https://github.com/ytorg/Yotter/issues/42
|
[2020-09-09 11:09:39,667] ERROR in app: Exception on /twitter [GET]
Traceback (most recent call last):
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/flask/app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/flask/app.py", line 1952, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/flask/app.py", line 1821, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/flask/_compat.py", line 39, in reraise
raise value
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/flask/app.py", line 1950, in full_dispatch_request
rv = self.dispatch_request()
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/flask/app.py", line 1936, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/flask_login/utils.py", line 272, in decorated_view
return func(*args, **kwargs)
File "/home/ubuntu/Yotter/app/routes.py", line 61, in twitter
posts.extend(getFeed(followingList))
File "/home/ubuntu/Yotter/app/routes.py", line 672, in getFeed
newPost.replyingUser=quote.find('a', attrs={'class':'username'}).text
AttributeError: 'NoneType' object has no attribute 'text'
|
AttributeError
|
def getYoutubePosts(ids):
videos = []
with FuturesSession() as session:
futures = [
session.get(
"https://www.youtube.com/feeds/videos.xml?channel_id={id}".format(
id=id.channelId
)
)
for id in ids
]
for future in as_completed(futures):
resp = future.result()
rssFeed = feedparser.parse(resp.content)
for vid in rssFeed.entries:
try:
time = datetime.datetime.now() - datetime.datetime(
*vid.published_parsed[:6]
)
except:
time = 0
if time.days >= 7:
continue
video = ytPost()
try:
video.date = vid.published_parsed
except:
video.date = datetime.datetime.utcnow()
try:
video.timeStamp = getTimeDiff(vid.published_parsed)
except:
video.timeStamp = "Unknown"
video.channelName = vid.author_detail.name
video.channelId = vid.yt_channelid
video.channelUrl = vid.author_detail.href
video.id = vid.yt_videoid
video.videoTitle = vid.title
video.videoThumb = vid.media_thumbnail[0]["url"].replace("/", "~")
video.views = vid.media_statistics["views"]
video.description = vid.summary_detail.value
video.description = re.sub(
r"^https?:\/\/.*[\r\n]*",
"",
video.description[0:120] + "...",
flags=re.MULTILINE,
)
videos.append(video)
return videos
|
def getYoutubePosts(ids):
videos = []
with FuturesSession() as session:
futures = [
session.get(
"https://www.youtube.com/feeds/videos.xml?channel_id={id}".format(
id=id.channelId
)
)
for id in ids
]
for future in as_completed(futures):
resp = future.result()
rssFeed = feedparser.parse(resp.content)
for vid in rssFeed.entries:
time = datetime.datetime.now() - datetime.datetime(
*vid.published_parsed[:6]
)
if time.days >= 7:
continue
video = ytPost()
video.date = vid.published_parsed
video.timeStamp = getTimeDiff(vid.published_parsed)
video.channelName = vid.author_detail.name
video.channelId = vid.yt_channelid
video.channelUrl = vid.author_detail.href
video.id = vid.yt_videoid
video.videoTitle = vid.title
video.videoThumb = vid.media_thumbnail[0]["url"].replace("/", "~")
video.views = vid.media_statistics["views"]
video.description = vid.summary_detail.value
video.description = re.sub(
r"^https?:\/\/.*[\r\n]*",
"",
video.description[0:120] + "...",
flags=re.MULTILINE,
)
videos.append(video)
return videos
|
https://github.com/ytorg/Yotter/issues/44
|
Traceback (most recent call last):
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/feedparser.py", line 398, in __getattr__
return self.__getitem__(key)
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/feedparser.py", line 356, in __getitem__
return dict.__getitem__(self, key)
KeyError: 'published_parsed'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/flask/app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/flask/app.py", line 1952, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/flask/app.py", line 1821, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/flask/_compat.py", line 39, in reraise
raise value
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/flask/app.py", line 1950, in full_dispatch_request
rv = self.dispatch_request()
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/flask/app.py", line 1936, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/flask_login/utils.py", line 272, in decorated_view
return func(*args, **kwargs)
File "/home/ubuntu/Yotter/app/routes.py", line 199, in youtube
videos = getYoutubePosts(ids)
File "/home/ubuntu/Yotter/app/routes.py", line 745, in getYoutubePosts
time = datetime.datetime.now() - datetime.datetime(*vid.published_parsed[:6])
File "/home/ubuntu/Yotter/venv/lib/python3.8/site-packages/feedparser.py", line 400, in __getattr__
raise AttributeError("object has no attribute '%s'" % key)
AttributeError: object has no attribute 'published_parsed'
|
KeyError
|
async def on_close(response: Response) -> None:
response.elapsed = datetime.timedelta(seconds=await timer.async_elapsed())
if hasattr(stream, "aclose"):
with map_exceptions(HTTPCORE_EXC_MAP, request=request):
await stream.aclose()
|
async def on_close(response: Response) -> None:
response.elapsed = datetime.timedelta(seconds=await timer.async_elapsed())
if hasattr(stream, "aclose"):
await stream.aclose()
|
https://github.com/encode/httpx/issues/1463
|
.tox/py38/lib/python3.8/site-packages/httpx/_client.py:1846: in __aexit__
await self.response.aclose()
.tox/py38/lib/python3.8/site-packages/httpx/_models.py:1299: in aclose
await self._on_close(self)
.tox/py38/lib/python3.8/site-packages/httpx/_client.py:1513: in on_close
await stream.aclose()
.tox/py38/lib/python3.8/site-packages/httpcore/_async/connection_pool.py:69: in aclose
await self.stream.aclose()
.tox/py38/lib/python3.8/site-packages/httpcore/_bytestreams.py:78: in aclose
await self._aclose_func()
.tox/py38/lib/python3.8/site-packages/httpcore/_async/http11.py:192: in _response_closed
await self.aclose()
.tox/py38/lib/python3.8/site-packages/httpcore/_async/http11.py:202: in aclose
await self.socket.aclose()
.tox/py38/lib/python3.8/site-packages/httpcore/_backends/asyncio.py:198: in aclose
await self.stream_writer.wait_closed() # type: ignore
/usr/lib64/python3.8/contextlib.py:131: in __exit__
self.gen.throw(type, value, traceback)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
map = {<class 'OSError'>: <class 'httpcore.CloseError'>}
@contextlib.contextmanager
def map_exceptions(map: Dict[Type[Exception], Type[Exception]]) -> Iterator[None]:
try:
yield
except Exception as exc: # noqa: PIE786
for from_exc, to_exc in map.items():
if isinstance(exc, from_exc):
raise to_exc(exc) from None
E httpcore.CloseError: [Errno 104] Connection reset by peer
.tox/py38/lib/python3.8/site-packages/httpcore/_exceptions.py:12: CloseError
------------------------------------------------- Captured log call --------------------------------------------------
ERROR asyncio:base_events.py:1707 Exception in callback _SelectorSocketTransport._call_connection_lost(None)
handle: <Handle _SelectorSocketTransport._call_connection_lost(None)>
Traceback (most recent call last):
File "/usr/lib64/python3.8/asyncio/events.py", line 81, in _run
self._context.run(self._callback, *self._args)
File "/usr/lib64/python3.8/asyncio/selector_events.py", line 970, in _call_connection_lost
super()._call_connection_lost(exc)
File "/usr/lib64/python3.8/asyncio/selector_events.py", line 730, in _call_connection_lost
self._sock.close()
File "/usr/lib64/python3.8/socket.py", line 500, in close
self._real_close()
File "/usr/lib64/python3.8/socket.py", line 494, in _real_close
_ss.close(self)
OSError: [Errno 9] Bad file descriptor
|
OSError
|
async def _send_single_request(self, request: Request, timeout: Timeout) -> Response:
"""
Sends a single request, without handling any redirections.
"""
transport = self._transport_for_url(request.url)
timer = Timer()
await timer.async_start()
with map_exceptions(HTTPCORE_EXC_MAP, request=request):
(status_code, headers, stream, ext) = await transport.arequest(
request.method.encode(),
request.url.raw,
headers=request.headers.raw,
stream=request.stream, # type: ignore
ext={"timeout": timeout.as_dict()},
)
async def on_close(response: Response) -> None:
response.elapsed = datetime.timedelta(seconds=await timer.async_elapsed())
if hasattr(stream, "aclose"):
with map_exceptions(HTTPCORE_EXC_MAP, request=request):
await stream.aclose()
response = Response(
status_code,
headers=headers,
stream=stream, # type: ignore
ext=ext,
request=request,
on_close=on_close,
)
self.cookies.extract_cookies(response)
status = f"{response.status_code} {response.reason_phrase}"
response_line = f"{response.http_version} {status}"
logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"')
return response
|
async def _send_single_request(self, request: Request, timeout: Timeout) -> Response:
"""
Sends a single request, without handling any redirections.
"""
transport = self._transport_for_url(request.url)
timer = Timer()
await timer.async_start()
with map_exceptions(HTTPCORE_EXC_MAP, request=request):
(status_code, headers, stream, ext) = await transport.arequest(
request.method.encode(),
request.url.raw,
headers=request.headers.raw,
stream=request.stream, # type: ignore
ext={"timeout": timeout.as_dict()},
)
async def on_close(response: Response) -> None:
response.elapsed = datetime.timedelta(seconds=await timer.async_elapsed())
if hasattr(stream, "aclose"):
await stream.aclose()
response = Response(
status_code,
headers=headers,
stream=stream, # type: ignore
ext=ext,
request=request,
on_close=on_close,
)
self.cookies.extract_cookies(response)
status = f"{response.status_code} {response.reason_phrase}"
response_line = f"{response.http_version} {status}"
logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"')
return response
|
https://github.com/encode/httpx/issues/1463
|
.tox/py38/lib/python3.8/site-packages/httpx/_client.py:1846: in __aexit__
await self.response.aclose()
.tox/py38/lib/python3.8/site-packages/httpx/_models.py:1299: in aclose
await self._on_close(self)
.tox/py38/lib/python3.8/site-packages/httpx/_client.py:1513: in on_close
await stream.aclose()
.tox/py38/lib/python3.8/site-packages/httpcore/_async/connection_pool.py:69: in aclose
await self.stream.aclose()
.tox/py38/lib/python3.8/site-packages/httpcore/_bytestreams.py:78: in aclose
await self._aclose_func()
.tox/py38/lib/python3.8/site-packages/httpcore/_async/http11.py:192: in _response_closed
await self.aclose()
.tox/py38/lib/python3.8/site-packages/httpcore/_async/http11.py:202: in aclose
await self.socket.aclose()
.tox/py38/lib/python3.8/site-packages/httpcore/_backends/asyncio.py:198: in aclose
await self.stream_writer.wait_closed() # type: ignore
/usr/lib64/python3.8/contextlib.py:131: in __exit__
self.gen.throw(type, value, traceback)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
map = {<class 'OSError'>: <class 'httpcore.CloseError'>}
@contextlib.contextmanager
def map_exceptions(map: Dict[Type[Exception], Type[Exception]]) -> Iterator[None]:
try:
yield
except Exception as exc: # noqa: PIE786
for from_exc, to_exc in map.items():
if isinstance(exc, from_exc):
raise to_exc(exc) from None
E httpcore.CloseError: [Errno 104] Connection reset by peer
.tox/py38/lib/python3.8/site-packages/httpcore/_exceptions.py:12: CloseError
------------------------------------------------- Captured log call --------------------------------------------------
ERROR asyncio:base_events.py:1707 Exception in callback _SelectorSocketTransport._call_connection_lost(None)
handle: <Handle _SelectorSocketTransport._call_connection_lost(None)>
Traceback (most recent call last):
File "/usr/lib64/python3.8/asyncio/events.py", line 81, in _run
self._context.run(self._callback, *self._args)
File "/usr/lib64/python3.8/asyncio/selector_events.py", line 970, in _call_connection_lost
super()._call_connection_lost(exc)
File "/usr/lib64/python3.8/asyncio/selector_events.py", line 730, in _call_connection_lost
self._sock.close()
File "/usr/lib64/python3.8/socket.py", line 500, in close
self._real_close()
File "/usr/lib64/python3.8/socket.py", line 494, in _real_close
_ss.close(self)
OSError: [Errno 9] Bad file descriptor
|
OSError
|
def __init__(
self, url: typing.Union["URL", str, RawURL] = "", params: QueryParamTypes = None
) -> None:
if isinstance(url, (str, tuple)):
if isinstance(url, tuple):
raw_scheme, raw_host, port, raw_path = url
scheme = raw_scheme.decode("ascii")
host = raw_host.decode("ascii")
if host and ":" in host and host[0] != "[":
# it's an IPv6 address, so it should be enclosed in "[" and "]"
# ref: https://tools.ietf.org/html/rfc2732#section-2
# ref: https://tools.ietf.org/html/rfc3986#section-3.2.2
host = f"[{host}]"
port_str = "" if port is None else f":{port}"
path = raw_path.decode("ascii")
url = f"{scheme}://{host}{port_str}{path}"
try:
self._uri_reference = rfc3986.iri_reference(url).encode()
except rfc3986.exceptions.InvalidAuthority as exc:
raise InvalidURL(message=str(exc)) from None
if self.is_absolute_url:
# We don't want to normalize relative URLs, since doing so
# removes any leading `../` portion.
self._uri_reference = self._uri_reference.normalize()
elif isinstance(url, URL):
self._uri_reference = url._uri_reference
else:
raise TypeError(
f"Invalid type for url. Expected str or httpx.URL, got {type(url)}"
)
# Add any query parameters, merging with any in the URL if needed.
if params:
if self._uri_reference.query:
url_params = QueryParams(self._uri_reference.query)
url_params.update(params)
query_string = str(url_params)
else:
query_string = str(QueryParams(params))
self._uri_reference = self._uri_reference.copy_with(query=query_string)
|
def __init__(
self, url: typing.Union["URL", str, RawURL] = "", params: QueryParamTypes = None
) -> None:
if isinstance(url, (str, tuple)):
if isinstance(url, tuple):
raw_scheme, raw_host, port, raw_path = url
scheme = raw_scheme.decode("ascii")
host = raw_host.decode("ascii")
port_str = "" if port is None else f":{port}"
path = raw_path.decode("ascii")
url = f"{scheme}://{host}{port_str}{path}"
try:
self._uri_reference = rfc3986.iri_reference(url).encode()
except rfc3986.exceptions.InvalidAuthority as exc:
raise InvalidURL(message=str(exc)) from None
if self.is_absolute_url:
# We don't want to normalize relative URLs, since doing so
# removes any leading `../` portion.
self._uri_reference = self._uri_reference.normalize()
elif isinstance(url, URL):
self._uri_reference = url._uri_reference
else:
raise TypeError(
f"Invalid type for url. Expected str or httpx.URL, got {type(url)}"
)
# Add any query parameters, merging with any in the URL if needed.
if params:
if self._uri_reference.query:
url_params = QueryParams(self._uri_reference.query)
url_params.update(params)
query_string = str(url_params)
else:
query_string = str(QueryParams(params))
self._uri_reference = self._uri_reference.copy_with(query=query_string)
|
https://github.com/encode/httpx/issues/1311
|
<Response [200]>
<Response [200]>
<Response [200 OK]>
Traceback (most recent call last):
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_exceptions.py", line 339, in map_exceptions
yield
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 858, in _send_single_request
ext={"timeout": timeout.as_dict()},
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_sync/connection_pool.py", line 195, in request
method, url, headers=headers, stream=stream, ext=ext
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_sync/connection.py", line 87, in request
self.socket = self._open_socket(timeout)
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_sync/connection.py", line 113, in _open_socket
local_address=self.local_address,
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_backends/sync.py", line 144, in open_tcp_stream
return SyncSocketStream(sock=sock)
File "/home/user/.local/share/pyenv/versions/3.7.6/lib/python3.7/contextlib.py", line 130, in __exit__
self.gen.throw(type, value, traceback)
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_exceptions.py", line 12, in map_exceptions
raise to_exc(exc) from None
httpcore.ConnectError: [Errno -2] Name or service not known
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "kek.py", line 11, in <module>
print(httpx.get(url6))
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_api.py", line 193, in get
trust_env=trust_env,
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_api.py", line 104, in request
allow_redirects=allow_redirects,
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 722, in request
request, auth=auth, allow_redirects=allow_redirects, timeout=timeout
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 758, in send
history=[],
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 795, in _send_handling_auth
history=history,
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 823, in _send_handling_redirects
response = self._send_single_request(request, timeout)
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 858, in _send_single_request
ext={"timeout": timeout.as_dict()},
File "/home/user/.local/share/pyenv/versions/3.7.6/lib/python3.7/contextlib.py", line 130, in __exit__
self.gen.throw(type, value, traceback)
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_exceptions.py", line 356, in map_exceptions
raise mapped_exc(message, **kwargs) from exc # type: ignore
httpx.ConnectError: [Errno -2] Name or service not known
|
httpcore.ConnectError
|
def host(self) -> str:
"""
The URL host as a string.
Always normlized to lowercase, and IDNA encoded.
Examples:
url = httpx.URL("http://www.EXAMPLE.org")
assert url.host == "www.example.org"
url = httpx.URL("http://中国.icom.museum")
assert url.host == "xn--fiqs8s.icom.museum"
url = httpx.URL("https://[::ffff:192.168.0.1]")
assert url.host == "::ffff:192.168.0.1"
"""
host: str = self._uri_reference.host
if host and ":" in host and host[0] == "[":
# it's an IPv6 address
host = host.lstrip("[").rstrip("]")
return host or ""
|
def host(self) -> str:
"""
The URL host as a string.
Always normlized to lowercase, and IDNA encoded.
Examples:
url = httpx.URL("http://www.EXAMPLE.org")
assert url.host == "www.example.org"
url = httpx.URL("http://中国.icom.museum")
assert url.host == "xn--fiqs8s.icom.museum"
"""
return self._uri_reference.host or ""
|
https://github.com/encode/httpx/issues/1311
|
<Response [200]>
<Response [200]>
<Response [200 OK]>
Traceback (most recent call last):
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_exceptions.py", line 339, in map_exceptions
yield
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 858, in _send_single_request
ext={"timeout": timeout.as_dict()},
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_sync/connection_pool.py", line 195, in request
method, url, headers=headers, stream=stream, ext=ext
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_sync/connection.py", line 87, in request
self.socket = self._open_socket(timeout)
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_sync/connection.py", line 113, in _open_socket
local_address=self.local_address,
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_backends/sync.py", line 144, in open_tcp_stream
return SyncSocketStream(sock=sock)
File "/home/user/.local/share/pyenv/versions/3.7.6/lib/python3.7/contextlib.py", line 130, in __exit__
self.gen.throw(type, value, traceback)
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_exceptions.py", line 12, in map_exceptions
raise to_exc(exc) from None
httpcore.ConnectError: [Errno -2] Name or service not known
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "kek.py", line 11, in <module>
print(httpx.get(url6))
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_api.py", line 193, in get
trust_env=trust_env,
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_api.py", line 104, in request
allow_redirects=allow_redirects,
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 722, in request
request, auth=auth, allow_redirects=allow_redirects, timeout=timeout
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 758, in send
history=[],
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 795, in _send_handling_auth
history=history,
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 823, in _send_handling_redirects
response = self._send_single_request(request, timeout)
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 858, in _send_single_request
ext={"timeout": timeout.as_dict()},
File "/home/user/.local/share/pyenv/versions/3.7.6/lib/python3.7/contextlib.py", line 130, in __exit__
self.gen.throw(type, value, traceback)
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_exceptions.py", line 356, in map_exceptions
raise mapped_exc(message, **kwargs) from exc # type: ignore
httpx.ConnectError: [Errno -2] Name or service not known
|
httpcore.ConnectError
|
def copy_with(self, **kwargs: typing.Any) -> "URL":
"""
Copy this URL, returning a new URL with some components altered.
Accepts the same set of parameters as the components that are made
available via properties on the `URL` class.
For example:
url = httpx.URL("https://www.example.com").copy_with(username="jo@gmail.com", password="a secret")
assert url == "https://jo%40email.com:a%20secret@www.example.com"
"""
allowed = {
"scheme": str,
"username": str,
"password": str,
"userinfo": bytes,
"host": str,
"port": int,
"netloc": str,
"path": str,
"query": bytes,
"raw_path": bytes,
"fragment": str,
}
for key, value in kwargs.items():
if key not in allowed:
message = f"{key!r} is an invalid keyword argument for copy_with()"
raise TypeError(message)
if value is not None and not isinstance(value, allowed[key]):
expected = allowed[key].__name__
seen = type(value).__name__
message = f"Argument {key!r} must be {expected} but got {seen}"
raise TypeError(message)
# Replace username, password, userinfo, host, port, netloc with "authority" for rfc3986
if "username" in kwargs or "password" in kwargs:
# Consolidate username and password into userinfo.
username = quote(kwargs.pop("username", self.username) or "")
password = quote(kwargs.pop("password", self.password) or "")
userinfo = f"{username}:{password}" if password else username
kwargs["userinfo"] = userinfo.encode("ascii")
if "host" in kwargs or "port" in kwargs:
# Consolidate host and port into netloc.
host = kwargs.pop("host", self.host) or ""
port = kwargs.pop("port", self.port)
if host and ":" in host and host[0] != "[":
# it's an IPv6 address, so it should be hidden under bracket
host = f"[{host}]"
kwargs["netloc"] = f"{host}:{port}" if port is not None else host
if "userinfo" in kwargs or "netloc" in kwargs:
# Consolidate userinfo and netloc into authority.
userinfo = (kwargs.pop("userinfo", self.userinfo) or b"").decode("ascii")
netloc = kwargs.pop("netloc", self.netloc) or ""
authority = f"{userinfo}@{netloc}" if userinfo else netloc
kwargs["authority"] = authority
if "raw_path" in kwargs:
raw_path = kwargs.pop("raw_path") or b""
path, has_query, query = raw_path.decode("ascii").partition("?")
kwargs["path"] = path
kwargs["query"] = query if has_query else None
else:
# Ensure path=<url quoted str> for rfc3986
if kwargs.get("path") is not None:
kwargs["path"] = quote(kwargs["path"])
# Ensure query=<str> for rfc3986
if kwargs.get("query") is not None:
kwargs["query"] = kwargs["query"].decode("ascii")
return URL(self._uri_reference.copy_with(**kwargs).unsplit())
|
def copy_with(self, **kwargs: typing.Any) -> "URL":
"""
Copy this URL, returning a new URL with some components altered.
Accepts the same set of parameters as the components that are made
available via properties on the `URL` class.
For example:
url = httpx.URL("https://www.example.com").copy_with(username="jo@gmail.com", password="a secret")
assert url == "https://jo%40email.com:a%20secret@www.example.com"
"""
allowed = {
"scheme": str,
"username": str,
"password": str,
"userinfo": bytes,
"host": str,
"port": int,
"netloc": str,
"path": str,
"query": bytes,
"raw_path": bytes,
"fragment": str,
}
for key, value in kwargs.items():
if key not in allowed:
message = f"{key!r} is an invalid keyword argument for copy_with()"
raise TypeError(message)
if value is not None and not isinstance(value, allowed[key]):
expected = allowed[key].__name__
seen = type(value).__name__
message = f"Argument {key!r} must be {expected} but got {seen}"
raise TypeError(message)
# Replace username, password, userinfo, host, port, netloc with "authority" for rfc3986
if "username" in kwargs or "password" in kwargs:
# Consolidate username and password into userinfo.
username = quote(kwargs.pop("username", self.username) or "")
password = quote(kwargs.pop("password", self.password) or "")
userinfo = f"{username}:{password}" if password else username
kwargs["userinfo"] = userinfo.encode("ascii")
if "host" in kwargs or "port" in kwargs:
# Consolidate host and port into netloc.
host = kwargs.pop("host", self.host) or ""
port = kwargs.pop("port", self.port)
kwargs["netloc"] = f"{host}:{port}" if port is not None else host
if "userinfo" in kwargs or "netloc" in kwargs:
# Consolidate userinfo and netloc into authority.
userinfo = (kwargs.pop("userinfo", self.userinfo) or b"").decode("ascii")
netloc = kwargs.pop("netloc", self.netloc) or ""
authority = f"{userinfo}@{netloc}" if userinfo else netloc
kwargs["authority"] = authority
if "raw_path" in kwargs:
raw_path = kwargs.pop("raw_path") or b""
path, has_query, query = raw_path.decode("ascii").partition("?")
kwargs["path"] = path
kwargs["query"] = query if has_query else None
else:
# Ensure path=<url quoted str> for rfc3986
if kwargs.get("path") is not None:
kwargs["path"] = quote(kwargs["path"])
# Ensure query=<str> for rfc3986
if kwargs.get("query") is not None:
kwargs["query"] = kwargs["query"].decode("ascii")
return URL(self._uri_reference.copy_with(**kwargs).unsplit())
|
https://github.com/encode/httpx/issues/1311
|
<Response [200]>
<Response [200]>
<Response [200 OK]>
Traceback (most recent call last):
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_exceptions.py", line 339, in map_exceptions
yield
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 858, in _send_single_request
ext={"timeout": timeout.as_dict()},
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_sync/connection_pool.py", line 195, in request
method, url, headers=headers, stream=stream, ext=ext
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_sync/connection.py", line 87, in request
self.socket = self._open_socket(timeout)
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_sync/connection.py", line 113, in _open_socket
local_address=self.local_address,
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_backends/sync.py", line 144, in open_tcp_stream
return SyncSocketStream(sock=sock)
File "/home/user/.local/share/pyenv/versions/3.7.6/lib/python3.7/contextlib.py", line 130, in __exit__
self.gen.throw(type, value, traceback)
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpcore/_exceptions.py", line 12, in map_exceptions
raise to_exc(exc) from None
httpcore.ConnectError: [Errno -2] Name or service not known
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "kek.py", line 11, in <module>
print(httpx.get(url6))
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_api.py", line 193, in get
trust_env=trust_env,
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_api.py", line 104, in request
allow_redirects=allow_redirects,
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 722, in request
request, auth=auth, allow_redirects=allow_redirects, timeout=timeout
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 758, in send
history=[],
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 795, in _send_handling_auth
history=history,
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 823, in _send_handling_redirects
response = self._send_single_request(request, timeout)
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_client.py", line 858, in _send_single_request
ext={"timeout": timeout.as_dict()},
File "/home/user/.local/share/pyenv/versions/3.7.6/lib/python3.7/contextlib.py", line 130, in __exit__
self.gen.throw(type, value, traceback)
File "/home/user/repos/lb-test/.venv/lib/python3.7/site-packages/httpx/_exceptions.py", line 356, in map_exceptions
raise mapped_exc(message, **kwargs) from exc # type: ignore
httpx.ConnectError: [Errno -2] Name or service not known
|
httpcore.ConnectError
|
def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]:
"""
Returns a list of the raw header items, as byte pairs.
"""
return list(self._list)
|
def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]:
"""
Returns a list of the raw header items, as byte pairs.
May be mutated in-place.
"""
return self._list
|
https://github.com/encode/httpx/issues/1203
|
import httpx
client = httpx.Client()
request = client.build_request("GET", "https://google.com")
requests.headers
Headers({'host': 'google.com', 'user-agent': 'python-httpx/0.14.1', 'accept': '*/*', 'accept-encoding': 'gzip, deflate, br', 'connection': 'keep-alive'})
"host" in request.headers
False
request.headers.pop("host")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "D:\programs\anaconda3\lib\_collections_abc.py", line 801, in pop
del self[key]
File "E:\projects\pycharm\httpx\httpx\_models.py", line 536, in __delitem__
del self._dict[del_key]
KeyError: b'host'
|
KeyError
|
def prepare(self) -> None:
for key, value in self.stream.get_headers().items():
# Ignore Transfer-Encoding if the Content-Length has been set explicitly.
if key.lower() == "transfer-encoding" and "content-length" in self.headers:
continue
self.headers.setdefault(key, value)
auto_headers: typing.List[typing.Tuple[bytes, bytes]] = []
has_host = "host" in self.headers
has_content_length = (
"content-length" in self.headers or "transfer-encoding" in self.headers
)
has_user_agent = "user-agent" in self.headers
has_accept = "accept" in self.headers
has_accept_encoding = "accept-encoding" in self.headers
has_connection = "connection" in self.headers
if not has_host:
url = self.url
if url.userinfo:
url = url.copy_with(username=None, password=None)
auto_headers.append((b"host", url.authority.encode("ascii")))
if not has_content_length and self.method in ("POST", "PUT", "PATCH"):
auto_headers.append((b"content-length", b"0"))
if not has_user_agent:
auto_headers.append((b"user-agent", USER_AGENT.encode("ascii")))
if not has_accept:
auto_headers.append((b"accept", b"*/*"))
if not has_accept_encoding:
auto_headers.append((b"accept-encoding", ACCEPT_ENCODING.encode()))
if not has_connection:
auto_headers.append((b"connection", b"keep-alive"))
self.headers = Headers(auto_headers + self.headers.raw)
|
def prepare(self) -> None:
for key, value in self.stream.get_headers().items():
# Ignore Transfer-Encoding if the Content-Length has been set explicitly.
if key.lower() == "transfer-encoding" and "content-length" in self.headers:
continue
self.headers.setdefault(key, value)
auto_headers: typing.List[typing.Tuple[bytes, bytes]] = []
has_host = "host" in self.headers
has_content_length = (
"content-length" in self.headers or "transfer-encoding" in self.headers
)
has_user_agent = "user-agent" in self.headers
has_accept = "accept" in self.headers
has_accept_encoding = "accept-encoding" in self.headers
has_connection = "connection" in self.headers
if not has_host:
url = self.url
if url.userinfo:
url = url.copy_with(username=None, password=None)
auto_headers.append((b"host", url.authority.encode("ascii")))
if not has_content_length and self.method in ("POST", "PUT", "PATCH"):
auto_headers.append((b"content-length", b"0"))
if not has_user_agent:
auto_headers.append((b"user-agent", USER_AGENT.encode("ascii")))
if not has_accept:
auto_headers.append((b"accept", b"*/*"))
if not has_accept_encoding:
auto_headers.append((b"accept-encoding", ACCEPT_ENCODING.encode()))
if not has_connection:
auto_headers.append((b"connection", b"keep-alive"))
for item in reversed(auto_headers):
self.headers.raw.insert(0, item)
|
https://github.com/encode/httpx/issues/1203
|
import httpx
client = httpx.Client()
request = client.build_request("GET", "https://google.com")
requests.headers
Headers({'host': 'google.com', 'user-agent': 'python-httpx/0.14.1', 'accept': '*/*', 'accept-encoding': 'gzip, deflate, br', 'connection': 'keep-alive'})
"host" in request.headers
False
request.headers.pop("host")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "D:\programs\anaconda3\lib\_collections_abc.py", line 801, in pop
del self[key]
File "E:\projects\pycharm\httpx\httpx\_models.py", line 536, in __delitem__
del self._dict[del_key]
KeyError: b'host'
|
KeyError
|
def username(self) -> str:
userinfo = self._uri_reference.userinfo or ""
return unquote(userinfo.partition(":")[0])
|
def username(self) -> str:
userinfo = self._uri_reference.userinfo or ""
return userinfo.partition(":")[0]
|
https://github.com/encode/httpx/issues/328
|
import httpx
httpx.URL("https://user@gmail.com:password@google.com")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/sethmlarson/Desktop/http3/httpx/models.py", line 112, in __init__
raise InvalidURL("No host included in URL.")
httpx.exceptions.InvalidURL: No host included in URL.
|
httpx.exceptions.InvalidURL
|
def password(self) -> str:
userinfo = self._uri_reference.userinfo or ""
return unquote(userinfo.partition(":")[2])
|
def password(self) -> str:
userinfo = self._uri_reference.userinfo or ""
return userinfo.partition(":")[2]
|
https://github.com/encode/httpx/issues/328
|
import httpx
httpx.URL("https://user@gmail.com:password@google.com")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/sethmlarson/Desktop/http3/httpx/models.py", line 112, in __init__
raise InvalidURL("No host included in URL.")
httpx.exceptions.InvalidURL: No host included in URL.
|
httpx.exceptions.InvalidURL
|
def copy_with(self, **kwargs: typing.Any) -> "URL":
if (
"username" in kwargs
or "password" in kwargs
or "host" in kwargs
or "port" in kwargs
):
host = kwargs.pop("host", self.host)
port = kwargs.pop("port", self.port)
username = quote(kwargs.pop("username", self.username) or "")
password = quote(kwargs.pop("password", self.password) or "")
authority = host
if port is not None:
authority += f":{port}"
if username:
userpass = username
if password:
userpass += f":{password}"
authority = f"{userpass}@{authority}"
kwargs["authority"] = authority
return URL(self._uri_reference.copy_with(**kwargs).unsplit())
|
def copy_with(self, **kwargs: typing.Any) -> "URL":
if (
"username" in kwargs
or "password" in kwargs
or "host" in kwargs
or "port" in kwargs
):
host = kwargs.pop("host", self.host)
port = kwargs.pop("port", self.port)
username = kwargs.pop("username", self.username)
password = kwargs.pop("password", self.password)
authority = host
if port is not None:
authority += f":{port}"
if username:
userpass = username
if password:
userpass += f":{password}"
authority = f"{userpass}@{authority}"
kwargs["authority"] = authority
return URL(self._uri_reference.copy_with(**kwargs).unsplit())
|
https://github.com/encode/httpx/issues/328
|
import httpx
httpx.URL("https://user@gmail.com:password@google.com")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/sethmlarson/Desktop/http3/httpx/models.py", line 112, in __init__
raise InvalidURL("No host included in URL.")
httpx.exceptions.InvalidURL: No host included in URL.
|
httpx.exceptions.InvalidURL
|
def join(self, url: URLTypes) -> "URL":
"""
Return an absolute URL, using this URL as the base.
"""
if self.is_relative_url:
return URL(url)
# We drop any fragment portion, because RFC 3986 strictly
# treats URLs with a fragment portion as not being absolute URLs.
base_uri = self._uri_reference.copy_with(fragment=None)
relative_url = URL(url)
return URL(relative_url._uri_reference.resolve_with(base_uri).unsplit())
|
def join(self, url: URLTypes) -> "URL":
"""
Return an absolute URL, using given this URL as the base.
"""
if self.is_relative_url:
return URL(url)
# We drop any fragment portion, because RFC 3986 strictly
# treats URLs with a fragment portion as not being absolute URLs.
base_uri = self._uri_reference.copy_with(fragment=None)
relative_url = URL(url)
return URL(relative_url._uri_reference.resolve_with(base_uri).unsplit())
|
https://github.com/encode/httpx/issues/328
|
import httpx
httpx.URL("https://user@gmail.com:password@google.com")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/sethmlarson/Desktop/http3/httpx/models.py", line 112, in __init__
raise InvalidURL("No host included in URL.")
httpx.exceptions.InvalidURL: No host included in URL.
|
httpx.exceptions.InvalidURL
|
def init_pool_manager(
self,
proxy: typing.Optional[Proxy],
ssl_context: ssl.SSLContext,
num_pools: int,
maxsize: int,
block: bool,
) -> typing.Union[urllib3.PoolManager, urllib3.ProxyManager]:
if proxy is None:
return urllib3.PoolManager(
ssl_context=ssl_context,
num_pools=num_pools,
maxsize=maxsize,
block=block,
)
else:
return urllib3.ProxyManager(
proxy_url=str(proxy.url),
proxy_headers=dict(proxy.headers),
ssl_context=ssl_context,
num_pools=num_pools,
maxsize=maxsize,
block=block,
)
|
def init_pool_manager(
self,
proxy: typing.Optional[Proxy],
ssl_context: ssl.SSLContext,
num_pools: int,
maxsize: int,
block: bool,
) -> typing.Union[urllib3.PoolManager, urllib3.ProxyManager]:
if proxy is None:
return urllib3.PoolManager(
ssl_context=ssl_context,
num_pools=num_pools,
maxsize=maxsize,
block=block,
)
else:
return urllib3.ProxyManager(
proxy_url=proxy.url,
proxy_headers=dict(proxy.headers),
ssl_context=ssl_context,
num_pools=num_pools,
maxsize=maxsize,
block=block,
)
|
https://github.com/encode/httpx/issues/762
|
$ ipython3 -c 'import httpx; r = httpx.get("https://www.google.com")'
parse_url http://127.0.0.1:1234 <class 'httpx.models.URL'>
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-1-d6fe3101235f> in <module>
----> 1 import httpx; r = httpx.get("https://www.google.com")
/usr/local/lib/python3.7/site-packages/httpx/api.py in get(url, params, headers, cookies, auth, allow_redirects, cert, verify, timeout, trust_env)
168 verify=verify,
169 timeout=timeout,
--> 170 trust_env=trust_env,
171 )
172
/usr/local/lib/python3.7/site-packages/httpx/api.py in request(method, url, params, data, files, json, headers, cookies, auth, timeout, allow_redirects, verify, cert, trust_env)
82 """
83 with Client(
---> 84 cert=cert, verify=verify, timeout=timeout, trust_env=trust_env,
85 ) as client:
86 return client.request(
/usr/local/lib/python3.7/site-packages/httpx/client.py in __init__(self, auth, params, headers, cookies, verify, cert, proxies, timeout, pool_limits, max_redirects, base_url, dispatch, app, trust_env)
477 trust_env=trust_env,
478 )
--> 479 for key, proxy in proxy_map.items()
480 }
481
/usr/local/lib/python3.7/site-packages/httpx/client.py in <dictcomp>(.0)
477 trust_env=trust_env,
478 )
--> 479 for key, proxy in proxy_map.items()
480 }
481
/usr/local/lib/python3.7/site-packages/httpx/client.py in init_proxy_dispatch(self, proxy, verify, cert, pool_limits, trust_env)
512 cert=cert,
513 pool_limits=pool_limits,
--> 514 trust_env=trust_env,
515 )
516
/usr/local/lib/python3.7/site-packages/httpx/dispatch/urllib3.py in __init__(self, proxy, verify, cert, trust_env, pool_limits)
58 num_pools=num_pools,
59 maxsize=maxsize,
---> 60 block=block,
61 )
62
/usr/local/lib/python3.7/site-packages/httpx/dispatch/urllib3.py in init_pool_manager(self, proxy, ssl_context, num_pools, maxsize, block)
83 num_pools=num_pools,
84 maxsize=maxsize,
---> 85 block=block,
86 )
87
/usr/local/lib/python3.7/site-packages/urllib3/poolmanager.py in __init__(self, proxy_url, num_pools, headers, proxy_headers, **connection_pool_kw)
412 proxy_url.port,
413 )
--> 414 proxy = parse_url(proxy_url)
415 if not proxy.port:
416 port = port_by_scheme.get(proxy.scheme, 80)
/usr/local/lib/python3.7/site-packages/urllib3/util/url.py in parse_url(url)
361 print('parse_url', url, type(url))
362 source_url = url
--> 363 if not SCHEME_RE.search(url):
364 url = "//" + url
365
TypeError: expected string or bytes-like object
|
TypeError
|
def __init__(self) -> None:
self.first_attempt = True
self.decompressor = zlib.decompressobj()
|
def __init__(self) -> None:
self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS)
|
https://github.com/encode/httpx/issues/756
|
httpx.get('http://localhost:9999')
Traceback (most recent call last):
File "/Users/jamie/code/httpx/httpx/decoders.py", line 52, in decode
return self.decompressor.decompress(data)
zlib.error: Error -3 while decompressing data: invalid stored block lengths
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/jamie/code/httpx/httpx/api.py", line 170, in get
trust_env=trust_env,
File "/Users/jamie/code/httpx/httpx/api.py", line 96, in request
allow_redirects=allow_redirects,
File "/Users/jamie/code/httpx/httpx/client.py", line 568, in request
request, auth=auth, allow_redirects=allow_redirects, timeout=timeout,
File "/Users/jamie/code/httpx/httpx/client.py", line 593, in send
response.read()
File "/Users/jamie/code/httpx/httpx/models.py", line 900, in read
self._content = b"".join([part for part in self.iter_bytes()])
File "/Users/jamie/code/httpx/httpx/models.py", line 900, in <listcomp>
self._content = b"".join([part for part in self.iter_bytes()])
File "/Users/jamie/code/httpx/httpx/models.py", line 912, in iter_bytes
yield self.decoder.decode(chunk)
File "/Users/jamie/code/httpx/httpx/decoders.py", line 54, in decode
raise DecodingError from exc
httpx.exceptions.DecodingError
|
zlib.error
|
def decode(self, data: bytes) -> bytes:
was_first_attempt = self.first_attempt
self.first_attempt = False
try:
return self.decompressor.decompress(data)
except zlib.error as exc:
if was_first_attempt:
self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS)
return self.decode(data)
raise DecodingError from exc
|
def decode(self, data: bytes) -> bytes:
try:
return self.decompressor.decompress(data)
except zlib.error as exc:
raise DecodingError from exc
|
https://github.com/encode/httpx/issues/756
|
httpx.get('http://localhost:9999')
Traceback (most recent call last):
File "/Users/jamie/code/httpx/httpx/decoders.py", line 52, in decode
return self.decompressor.decompress(data)
zlib.error: Error -3 while decompressing data: invalid stored block lengths
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/jamie/code/httpx/httpx/api.py", line 170, in get
trust_env=trust_env,
File "/Users/jamie/code/httpx/httpx/api.py", line 96, in request
allow_redirects=allow_redirects,
File "/Users/jamie/code/httpx/httpx/client.py", line 568, in request
request, auth=auth, allow_redirects=allow_redirects, timeout=timeout,
File "/Users/jamie/code/httpx/httpx/client.py", line 593, in send
response.read()
File "/Users/jamie/code/httpx/httpx/models.py", line 900, in read
self._content = b"".join([part for part in self.iter_bytes()])
File "/Users/jamie/code/httpx/httpx/models.py", line 900, in <listcomp>
self._content = b"".join([part for part in self.iter_bytes()])
File "/Users/jamie/code/httpx/httpx/models.py", line 912, in iter_bytes
yield self.decoder.decode(chunk)
File "/Users/jamie/code/httpx/httpx/decoders.py", line 54, in decode
raise DecodingError from exc
httpx.exceptions.DecodingError
|
zlib.error
|
def __init__(
self,
stream_reader: asyncio.StreamReader,
stream_writer: asyncio.StreamWriter,
):
self.stream_reader = stream_reader
self.stream_writer = stream_writer
self.read_lock = asyncio.Lock()
self.write_lock = asyncio.Lock()
self._inner: typing.Optional[SocketStream] = None
|
def __init__(
self,
stream_reader: asyncio.StreamReader,
stream_writer: asyncio.StreamWriter,
):
self.stream_reader = stream_reader
self.stream_writer = stream_writer
self.read_lock = asyncio.Lock()
self._inner: typing.Optional[SocketStream] = None
|
https://github.com/encode/httpx/issues/551
|
Traceback (most recent call last):
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 57, in <module>
asyncio.get_event_loop().run_until_complete(cl.embedd_batch())
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 19, in embedd_batch
embeddings = await asyncio.gather(*requests)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 43, in _send_to_server
emb = await self._send_request(client, im, url)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 50, in _send_request
response = await client.post(url, headers=headers, data=image)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 492, in post
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 634, in request
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 658, in send
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 273, in _get_response
return await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/middleware/redirect.py", line 31, in __call__
response = await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 228, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection.py", line 62, in send
response = await self.h2_connection.send(request, timeout=timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 49, in send
stream_id = await self.send_headers(request, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 121, in send_headers
await self.stream.write(data_to_send, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/concurrency/asyncio.py", line 149, in write
self.stream_writer.drain(), timeout.write_timeout
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 348, in drain
await self._protocol._drain_helper()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 206, in _drain_helper
assert waiter is None or waiter.cancelled()
AssertionError
|
AssertionError
|
async def write(self, data: bytes, timeout: Timeout) -> None:
if not data:
return
try:
async with self.write_lock:
self.stream_writer.write(data)
return await asyncio.wait_for(
self.stream_writer.drain(), timeout.write_timeout
)
except asyncio.TimeoutError:
raise WriteTimeout() from None
|
async def write(self, data: bytes, timeout: Timeout) -> None:
if not data:
return
self.stream_writer.write(data)
try:
return await asyncio.wait_for(self.stream_writer.drain(), timeout.write_timeout)
except asyncio.TimeoutError:
raise WriteTimeout() from None
|
https://github.com/encode/httpx/issues/551
|
Traceback (most recent call last):
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 57, in <module>
asyncio.get_event_loop().run_until_complete(cl.embedd_batch())
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 19, in embedd_batch
embeddings = await asyncio.gather(*requests)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 43, in _send_to_server
emb = await self._send_request(client, im, url)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 50, in _send_request
response = await client.post(url, headers=headers, data=image)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 492, in post
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 634, in request
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 658, in send
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 273, in _get_response
return await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/middleware/redirect.py", line 31, in __call__
response = await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 228, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection.py", line 62, in send
response = await self.h2_connection.send(request, timeout=timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 49, in send
stream_id = await self.send_headers(request, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 121, in send_headers
await self.stream.write(data_to_send, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/concurrency/asyncio.py", line 149, in write
self.stream_writer.drain(), timeout.write_timeout
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 348, in drain
await self._protocol._drain_helper()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 206, in _drain_helper
assert waiter is None or waiter.cancelled()
AssertionError
|
AssertionError
|
async def close(self) -> None:
# NOTE: StreamWriter instances expose a '.wait_closed()' coroutine function,
# but using it has caused compatibility issues with certain sites in
# the past (see https://github.com/encode/httpx/issues/634), which is
# why we don't call it here.
# This is fine, though, because '.close()' schedules the actual closing of the
# stream, meaning that at best it will happen during the next event loop
# iteration, and at worst asyncio will take care of it on program exit.
async with self.write_lock:
self.stream_writer.close()
|
async def close(self) -> None:
# NOTE: StreamWriter instances expose a '.wait_closed()' coroutine function,
# but using it has caused compatibility issues with certain sites in
# the past (see https://github.com/encode/httpx/issues/634), which is
# why we don't call it here.
# This is fine, though, because '.close()' schedules the actual closing of the
# stream, meaning that at best it will happen during the next event loop
# iteration, and at worst asyncio will take care of it on program exit.
self.stream_writer.close()
|
https://github.com/encode/httpx/issues/551
|
Traceback (most recent call last):
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 57, in <module>
asyncio.get_event_loop().run_until_complete(cl.embedd_batch())
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 19, in embedd_batch
embeddings = await asyncio.gather(*requests)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 43, in _send_to_server
emb = await self._send_request(client, im, url)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 50, in _send_request
response = await client.post(url, headers=headers, data=image)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 492, in post
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 634, in request
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 658, in send
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 273, in _get_response
return await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/middleware/redirect.py", line 31, in __call__
response = await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 228, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection.py", line 62, in send
response = await self.h2_connection.send(request, timeout=timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 49, in send
stream_id = await self.send_headers(request, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 121, in send_headers
await self.stream.write(data_to_send, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/concurrency/asyncio.py", line 149, in write
self.stream_writer.drain(), timeout.write_timeout
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 348, in drain
await self._protocol._drain_helper()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 206, in _drain_helper
assert waiter is None or waiter.cancelled()
AssertionError
|
AssertionError
|
async def close(self) -> None:
async with self.write_lock:
await self.stream.aclose()
|
async def close(self) -> None:
await self.stream.aclose()
|
https://github.com/encode/httpx/issues/551
|
Traceback (most recent call last):
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 57, in <module>
asyncio.get_event_loop().run_until_complete(cl.embedd_batch())
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 19, in embedd_batch
embeddings = await asyncio.gather(*requests)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 43, in _send_to_server
emb = await self._send_request(client, im, url)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 50, in _send_request
response = await client.post(url, headers=headers, data=image)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 492, in post
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 634, in request
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 658, in send
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 273, in _get_response
return await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/middleware/redirect.py", line 31, in __call__
response = await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 228, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection.py", line 62, in send
response = await self.h2_connection.send(request, timeout=timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 49, in send
stream_id = await self.send_headers(request, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 121, in send_headers
await self.stream.write(data_to_send, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/concurrency/asyncio.py", line 149, in write
self.stream_writer.drain(), timeout.write_timeout
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 348, in drain
await self._protocol._drain_helper()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 206, in _drain_helper
assert waiter is None or waiter.cancelled()
AssertionError
|
AssertionError
|
def __init__(self) -> None:
self._lock = trio.Lock()
|
def __init__(self) -> None:
self._event = trio.Event()
|
https://github.com/encode/httpx/issues/551
|
Traceback (most recent call last):
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 57, in <module>
asyncio.get_event_loop().run_until_complete(cl.embedd_batch())
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 19, in embedd_batch
embeddings = await asyncio.gather(*requests)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 43, in _send_to_server
emb = await self._send_request(client, im, url)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 50, in _send_request
response = await client.post(url, headers=headers, data=image)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 492, in post
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 634, in request
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 658, in send
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 273, in _get_response
return await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/middleware/redirect.py", line 31, in __call__
response = await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 228, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection.py", line 62, in send
response = await self.h2_connection.send(request, timeout=timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 49, in send
stream_id = await self.send_headers(request, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 121, in send_headers
await self.stream.write(data_to_send, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/concurrency/asyncio.py", line 149, in write
self.stream_writer.drain(), timeout.write_timeout
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 348, in drain
await self._protocol._drain_helper()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 206, in _drain_helper
assert waiter is None or waiter.cancelled()
AssertionError
|
AssertionError
|
async def acquire(self) -> None:
await self._lock.acquire()
|
async def acquire(self, timeout: float = None) -> None:
timeout = none_as_inf(timeout)
with trio.move_on_after(timeout):
await self.semaphore.acquire()
return
raise self.exc_class()
|
https://github.com/encode/httpx/issues/551
|
Traceback (most recent call last):
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 57, in <module>
asyncio.get_event_loop().run_until_complete(cl.embedd_batch())
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 19, in embedd_batch
embeddings = await asyncio.gather(*requests)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 43, in _send_to_server
emb = await self._send_request(client, im, url)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 50, in _send_request
response = await client.post(url, headers=headers, data=image)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 492, in post
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 634, in request
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 658, in send
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 273, in _get_response
return await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/middleware/redirect.py", line 31, in __call__
response = await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 228, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection.py", line 62, in send
response = await self.h2_connection.send(request, timeout=timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 49, in send
stream_id = await self.send_headers(request, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 121, in send_headers
await self.stream.write(data_to_send, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/concurrency/asyncio.py", line 149, in write
self.stream_writer.drain(), timeout.write_timeout
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 348, in drain
await self._protocol._drain_helper()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 206, in _drain_helper
assert waiter is None or waiter.cancelled()
AssertionError
|
AssertionError
|
def release(self) -> None:
self._lock.release()
|
def release(self) -> None:
self.semaphore.release()
|
https://github.com/encode/httpx/issues/551
|
Traceback (most recent call last):
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 57, in <module>
asyncio.get_event_loop().run_until_complete(cl.embedd_batch())
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 19, in embedd_batch
embeddings = await asyncio.gather(*requests)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 43, in _send_to_server
emb = await self._send_request(client, im, url)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 50, in _send_request
response = await client.post(url, headers=headers, data=image)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 492, in post
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 634, in request
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 658, in send
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 273, in _get_response
return await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/middleware/redirect.py", line 31, in __call__
response = await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 228, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection.py", line 62, in send
response = await self.h2_connection.send(request, timeout=timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 49, in send
stream_id = await self.send_headers(request, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 121, in send_headers
await self.stream.write(data_to_send, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/concurrency/asyncio.py", line 149, in write
self.stream_writer.drain(), timeout.write_timeout
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 348, in drain
await self._protocol._drain_helper()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 206, in _drain_helper
assert waiter is None or waiter.cancelled()
AssertionError
|
AssertionError
|
def __init__(
self,
socket: BaseSocketStream,
backend: typing.Union[str, ConcurrencyBackend] = "auto",
on_release: typing.Callable = None,
):
self.socket = socket
self.backend = lookup_backend(backend)
self.on_release = on_release
self.state = h2.connection.H2Connection(config=self.CONFIG)
self.streams = {} # type: typing.Dict[int, HTTP2Stream]
self.events = {} # type: typing.Dict[int, typing.List[h2.events.Event]]
self.sent_connection_init = False
|
def __init__(
self,
socket: BaseSocketStream,
backend: typing.Union[str, ConcurrencyBackend] = "auto",
on_release: typing.Callable = None,
):
self.socket = socket
self.backend = lookup_backend(backend)
self.on_release = on_release
self.state = h2.connection.H2Connection(config=self.CONFIG)
self.streams = {} # type: typing.Dict[int, HTTP2Stream]
self.events = {} # type: typing.Dict[int, typing.List[h2.events.Event]]
self.init_started = False
|
https://github.com/encode/httpx/issues/551
|
Traceback (most recent call last):
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 57, in <module>
asyncio.get_event_loop().run_until_complete(cl.embedd_batch())
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 19, in embedd_batch
embeddings = await asyncio.gather(*requests)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 43, in _send_to_server
emb = await self._send_request(client, im, url)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 50, in _send_request
response = await client.post(url, headers=headers, data=image)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 492, in post
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 634, in request
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 658, in send
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 273, in _get_response
return await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/middleware/redirect.py", line 31, in __call__
response = await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 228, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection.py", line 62, in send
response = await self.h2_connection.send(request, timeout=timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 49, in send
stream_id = await self.send_headers(request, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 121, in send_headers
await self.stream.write(data_to_send, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/concurrency/asyncio.py", line 149, in write
self.stream_writer.drain(), timeout.write_timeout
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 348, in drain
await self._protocol._drain_helper()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 206, in _drain_helper
assert waiter is None or waiter.cancelled()
AssertionError
|
AssertionError
|
async def send(self, request: Request, timeout: Timeout = None) -> Response:
timeout = Timeout() if timeout is None else timeout
async with self.init_lock:
if not self.sent_connection_init:
# The very first stream is responsible for initiating the connection.
await self.send_connection_init(timeout)
self.sent_connection_init = True
stream_id = self.state.get_next_available_stream_id()
stream = HTTP2Stream(stream_id=stream_id, connection=self)
self.streams[stream_id] = stream
self.events[stream_id] = []
return await stream.send(request, timeout)
|
async def send(self, request: Request, timeout: Timeout = None) -> Response:
timeout = Timeout() if timeout is None else timeout
if not self.init_started:
# The very first stream is responsible for initiating the connection.
self.init_started = True
await self.send_connection_init(timeout)
stream_id = self.state.get_next_available_stream_id()
self.init_complete.set()
else:
# All other streams need to wait until the connection is established.
await self.init_complete.wait()
stream_id = self.state.get_next_available_stream_id()
stream = HTTP2Stream(stream_id=stream_id, connection=self)
self.streams[stream_id] = stream
self.events[stream_id] = []
return await stream.send(request, timeout)
|
https://github.com/encode/httpx/issues/551
|
Traceback (most recent call last):
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 57, in <module>
asyncio.get_event_loop().run_until_complete(cl.embedd_batch())
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 19, in embedd_batch
embeddings = await asyncio.gather(*requests)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 43, in _send_to_server
emb = await self._send_request(client, im, url)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 50, in _send_request
response = await client.post(url, headers=headers, data=image)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 492, in post
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 634, in request
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 658, in send
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 273, in _get_response
return await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/middleware/redirect.py", line 31, in __call__
response = await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 228, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection.py", line 62, in send
response = await self.h2_connection.send(request, timeout=timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 49, in send
stream_id = await self.send_headers(request, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 121, in send_headers
await self.stream.write(data_to_send, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/concurrency/asyncio.py", line 149, in write
self.stream_writer.drain(), timeout.write_timeout
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 348, in drain
await self._protocol._drain_helper()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 206, in _drain_helper
assert waiter is None or waiter.cancelled()
AssertionError
|
AssertionError
|
def __init__(
self,
*,
auth: AuthTypes = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
verify: VerifyTypes = True,
cert: CertTypes = None,
http_versions: HTTPVersionTypes = None,
proxies: ProxiesTypes = None,
timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
max_redirects: int = DEFAULT_MAX_REDIRECTS,
base_url: URLTypes = None,
dispatch: typing.Union[AsyncDispatcher, Dispatcher] = None,
app: typing.Callable = None,
backend: ConcurrencyBackend = None,
trust_env: bool = True,
):
if backend is None:
backend = AsyncioBackend()
self.check_concurrency_backend(backend)
if app is not None:
param_count = len(inspect.signature(app).parameters)
assert param_count in (2, 3)
if param_count == 2:
dispatch = WSGIDispatch(app=app)
else:
dispatch = ASGIDispatch(app=app, backend=backend)
self.trust_env = True if trust_env is None else trust_env
if dispatch is None:
async_dispatch: AsyncDispatcher = ConnectionPool(
verify=verify,
cert=cert,
timeout=timeout,
http_versions=http_versions,
pool_limits=pool_limits,
backend=backend,
trust_env=self.trust_env,
)
elif isinstance(dispatch, Dispatcher):
async_dispatch = ThreadedDispatcher(dispatch, backend)
else:
async_dispatch = dispatch
if base_url is None:
self.base_url = URL("", allow_relative=True)
else:
self.base_url = URL(base_url)
if params is None:
params = {}
self.auth = auth
self._params = QueryParams(params)
self._headers = Headers(headers)
self._cookies = Cookies(cookies)
self.max_redirects = max_redirects
self.dispatch = async_dispatch
self.concurrency_backend = backend
if proxies is None and trust_env:
proxies = typing.cast(ProxiesTypes, get_environment_proxies())
self.proxies: typing.Dict[str, AsyncDispatcher] = _proxies_to_dispatchers(
proxies,
verify=verify,
cert=cert,
timeout=timeout,
http_versions=http_versions,
pool_limits=pool_limits,
backend=backend,
trust_env=trust_env,
)
|
def __init__(
self,
*,
auth: AuthTypes = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
verify: VerifyTypes = True,
cert: CertTypes = None,
http_versions: HTTPVersionTypes = None,
proxies: ProxiesTypes = None,
timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
max_redirects: int = DEFAULT_MAX_REDIRECTS,
base_url: URLTypes = None,
dispatch: typing.Union[AsyncDispatcher, Dispatcher] = None,
app: typing.Callable = None,
backend: ConcurrencyBackend = None,
trust_env: bool = True,
):
if backend is None:
backend = AsyncioBackend()
self.check_concurrency_backend(backend)
if app is not None:
param_count = len(inspect.signature(app).parameters)
assert param_count in (2, 3)
if param_count == 2:
dispatch = WSGIDispatch(app=app)
else:
dispatch = ASGIDispatch(app=app, backend=backend)
self.trust_env = True if trust_env is None else trust_env
if dispatch is None:
async_dispatch: AsyncDispatcher = ConnectionPool(
verify=verify,
cert=cert,
timeout=timeout,
http_versions=http_versions,
pool_limits=pool_limits,
backend=backend,
trust_env=self.trust_env,
)
elif isinstance(dispatch, Dispatcher):
async_dispatch = ThreadedDispatcher(dispatch, backend)
else:
async_dispatch = dispatch
if base_url is None:
self.base_url = URL("", allow_relative=True)
else:
self.base_url = URL(base_url)
if proxies is None and trust_env:
proxies = typing.cast(ProxiesTypes, get_environment_proxies())
self.proxies: typing.Dict[str, AsyncDispatcher] = _proxies_to_dispatchers(proxies)
if params is None:
params = {}
self.auth = auth
self._params = QueryParams(params)
self._headers = Headers(headers)
self._cookies = Cookies(cookies)
self.max_redirects = max_redirects
self.dispatch = async_dispatch
self.concurrency_backend = backend
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
async def _get_response(
self,
request: AsyncRequest,
*,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
verify: VerifyTypes = None,
cert: CertTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> AsyncResponse:
if request.url.scheme not in ("http", "https"):
raise InvalidURL('URL scheme must be "http" or "https".')
dispatch = self._dispatcher_for_request(request, self.proxies)
async def get_response(request: AsyncRequest) -> AsyncResponse:
try:
with ElapsedTimer() as timer:
response = await dispatch.send(
request, verify=verify, cert=cert, timeout=timeout
)
response.elapsed = timer.elapsed
except HTTPError as exc:
# Add the original request to any HTTPError unless
# there'a already a request attached in the case of
# a ProxyError.
if exc.request is None:
exc.request = request
raise
self.cookies.extract_cookies(response)
if not stream:
try:
await response.read()
finally:
await response.close()
return response
def wrap(
get_response: typing.Callable, middleware: BaseMiddleware
) -> typing.Callable:
return functools.partial(middleware, get_response=get_response)
get_response = wrap(
get_response,
RedirectMiddleware(allow_redirects=allow_redirects, cookies=self.cookies),
)
auth_middleware = self._get_auth_middleware(
request=request,
trust_env=self.trust_env if trust_env is None else trust_env,
auth=self.auth if auth is None else auth,
)
if auth_middleware is not None:
get_response = wrap(get_response, auth_middleware)
return await get_response(request)
|
async def _get_response(
self,
request: AsyncRequest,
*,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
verify: VerifyTypes = None,
cert: CertTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> AsyncResponse:
if request.url.scheme not in ("http", "https"):
raise InvalidURL('URL scheme must be "http" or "https".')
if proxies is not None:
dispatch_proxies = _proxies_to_dispatchers(proxies)
else:
dispatch_proxies = self.proxies
dispatch = self._dispatcher_for_request(request, dispatch_proxies)
async def get_response(request: AsyncRequest) -> AsyncResponse:
try:
with ElapsedTimer() as timer:
response = await dispatch.send(
request, verify=verify, cert=cert, timeout=timeout
)
response.elapsed = timer.elapsed
except HTTPError as exc:
# Add the original request to any HTTPError unless
# there'a already a request attached in the case of
# a ProxyError.
if exc.request is None:
exc.request = request
raise
self.cookies.extract_cookies(response)
if not stream:
try:
await response.read()
finally:
await response.close()
return response
def wrap(
get_response: typing.Callable, middleware: BaseMiddleware
) -> typing.Callable:
return functools.partial(middleware, get_response=get_response)
get_response = wrap(
get_response,
RedirectMiddleware(allow_redirects=allow_redirects, cookies=self.cookies),
)
auth_middleware = self._get_auth_middleware(
request=request,
trust_env=self.trust_env if trust_env is None else trust_env,
auth=self.auth if auth is None else auth,
)
if auth_middleware is not None:
get_response = wrap(get_response, auth_middleware)
return await get_response(request)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
async def get(
self,
url: URLTypes,
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> AsyncResponse:
return await self.request(
"GET",
url,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
async def get(
self,
url: URLTypes,
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self.request(
"GET",
url,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
async def options(
self,
url: URLTypes,
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> AsyncResponse:
return await self.request(
"OPTIONS",
url,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
async def options(
self,
url: URLTypes,
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self.request(
"OPTIONS",
url,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
async def head(
self,
url: URLTypes,
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = False, # NOTE: Differs to usual default.
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> AsyncResponse:
return await self.request(
"HEAD",
url,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
async def head(
self,
url: URLTypes,
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = False, # NOTE: Differs to usual default.
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self.request(
"HEAD",
url,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
async def post(
self,
url: URLTypes,
*,
data: AsyncRequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> AsyncResponse:
return await self.request(
"POST",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
async def post(
self,
url: URLTypes,
*,
data: AsyncRequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self.request(
"POST",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
async def put(
self,
url: URLTypes,
*,
data: AsyncRequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> AsyncResponse:
return await self.request(
"PUT",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
async def put(
self,
url: URLTypes,
*,
data: AsyncRequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self.request(
"PUT",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
async def patch(
self,
url: URLTypes,
*,
data: AsyncRequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> AsyncResponse:
return await self.request(
"PATCH",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
async def patch(
self,
url: URLTypes,
*,
data: AsyncRequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self.request(
"PATCH",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
async def delete(
self,
url: URLTypes,
*,
data: AsyncRequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> AsyncResponse:
return await self.request(
"DELETE",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
async def delete(
self,
url: URLTypes,
*,
data: AsyncRequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self.request(
"DELETE",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
async def request(
self,
method: str,
url: URLTypes,
*,
data: AsyncRequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> AsyncResponse:
request = self.build_request(
method=method,
url=url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
)
response = await self.send(
request,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
return response
|
async def request(
self,
method: str,
url: URLTypes,
*,
data: AsyncRequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> AsyncResponse:
request = self.build_request(
method=method,
url=url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
)
response = await self.send(
request,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
return response
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
async def send(
self,
request: AsyncRequest,
*,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
verify: VerifyTypes = None,
cert: CertTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> AsyncResponse:
return await self._get_response(
request=request,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
async def send(
self,
request: AsyncRequest,
*,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
verify: VerifyTypes = None,
cert: CertTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self._get_response(
request=request,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
def request(
self,
method: str,
url: URLTypes,
*,
data: RequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> Response:
request = self.build_request(
method=method,
url=url,
data=self._async_request_data(data),
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
)
response = self.send(
request,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
return response
|
def request(
self,
method: str,
url: URLTypes,
*,
data: RequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> Response:
request = self.build_request(
method=method,
url=url,
data=self._async_request_data(data),
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
)
response = self.send(
request,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
return response
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
def send(
self,
request: AsyncRequest,
*,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
verify: VerifyTypes = None,
cert: CertTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> Response:
concurrency_backend = self.concurrency_backend
coroutine = self._get_response
args = [request]
kwargs = {
"stream": True,
"auth": auth,
"allow_redirects": allow_redirects,
"verify": verify,
"cert": cert,
"timeout": timeout,
"trust_env": trust_env,
}
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
content = getattr(
async_response, "_raw_content", getattr(async_response, "_raw_stream", None)
)
sync_content = self._sync_data(content)
def sync_on_close() -> None:
nonlocal concurrency_backend, async_response
concurrency_backend.run(async_response.on_close)
response = Response(
status_code=async_response.status_code,
http_version=async_response.http_version,
headers=async_response.headers,
content=sync_content,
on_close=sync_on_close,
request=async_response.request,
history=async_response.history,
elapsed=async_response.elapsed,
)
if not stream:
try:
response.read()
finally:
response.close()
return response
|
def send(
self,
request: AsyncRequest,
*,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
verify: VerifyTypes = None,
cert: CertTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> Response:
concurrency_backend = self.concurrency_backend
coroutine = self._get_response
args = [request]
kwargs = {
"stream": True,
"auth": auth,
"allow_redirects": allow_redirects,
"verify": verify,
"cert": cert,
"timeout": timeout,
"trust_env": trust_env,
"proxies": proxies,
}
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
content = getattr(
async_response, "_raw_content", getattr(async_response, "_raw_stream", None)
)
sync_content = self._sync_data(content)
def sync_on_close() -> None:
nonlocal concurrency_backend, async_response
concurrency_backend.run(async_response.on_close)
response = Response(
status_code=async_response.status_code,
http_version=async_response.http_version,
headers=async_response.headers,
content=sync_content,
on_close=sync_on_close,
request=async_response.request,
history=async_response.history,
elapsed=async_response.elapsed,
)
if not stream:
try:
response.read()
finally:
response.close()
return response
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
def get(
self,
url: URLTypes,
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> Response:
return self.request(
"GET",
url,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
def get(
self,
url: URLTypes,
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> Response:
return self.request(
"GET",
url,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
def options(
self,
url: URLTypes,
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> Response:
return self.request(
"OPTIONS",
url,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
def options(
self,
url: URLTypes,
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> Response:
return self.request(
"OPTIONS",
url,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
def head(
self,
url: URLTypes,
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = False, # NOTE: Differs to usual default.
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> Response:
return self.request(
"HEAD",
url,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
def head(
self,
url: URLTypes,
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = False, # NOTE: Differs to usual default.
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> Response:
return self.request(
"HEAD",
url,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
def post(
self,
url: URLTypes,
*,
data: RequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> Response:
return self.request(
"POST",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
def post(
self,
url: URLTypes,
*,
data: RequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> Response:
return self.request(
"POST",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
def put(
self,
url: URLTypes,
*,
data: RequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> Response:
return self.request(
"PUT",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
def put(
self,
url: URLTypes,
*,
data: RequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> Response:
return self.request(
"PUT",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
def patch(
self,
url: URLTypes,
*,
data: RequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> Response:
return self.request(
"PATCH",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
def patch(
self,
url: URLTypes,
*,
data: RequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> Response:
return self.request(
"PATCH",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
def delete(
self,
url: URLTypes,
*,
data: RequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
) -> Response:
return self.request(
"DELETE",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
)
|
def delete(
self,
url: URLTypes,
*,
data: RequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
stream: bool = False,
auth: AuthTypes = None,
allow_redirects: bool = True,
cert: CertTypes = None,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
proxies: ProxiesTypes = None,
) -> Response:
return self.request(
"DELETE",
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
stream=stream,
auth=auth,
allow_redirects=allow_redirects,
verify=verify,
cert=cert,
timeout=timeout,
trust_env=trust_env,
proxies=proxies,
)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
def _proxies_to_dispatchers(
proxies: typing.Optional[ProxiesTypes],
verify: VerifyTypes,
cert: typing.Optional[CertTypes],
timeout: TimeoutTypes,
http_versions: typing.Optional[HTTPVersionTypes],
pool_limits: PoolLimits,
backend: ConcurrencyBackend,
trust_env: bool,
) -> typing.Dict[str, AsyncDispatcher]:
def _proxy_from_url(url: URLTypes) -> AsyncDispatcher:
nonlocal verify, cert, timeout, http_versions, pool_limits, backend, trust_env
url = URL(url)
if url.scheme in ("http", "https"):
return HTTPProxy(
url,
verify=verify,
cert=cert,
timeout=timeout,
pool_limits=pool_limits,
backend=backend,
trust_env=trust_env,
http_versions=http_versions,
)
raise ValueError(f"Unknown proxy for {url!r}")
if proxies is None:
return {}
elif isinstance(proxies, (str, URL)):
return {"all": _proxy_from_url(proxies)}
elif isinstance(proxies, AsyncDispatcher):
return {"all": proxies}
else:
new_proxies = {}
for key, dispatcher_or_url in proxies.items():
if isinstance(dispatcher_or_url, (str, URL)):
new_proxies[str(key)] = _proxy_from_url(dispatcher_or_url)
else:
new_proxies[str(key)] = dispatcher_or_url
return new_proxies
|
def _proxies_to_dispatchers(
proxies: typing.Optional[ProxiesTypes],
) -> typing.Dict[str, AsyncDispatcher]:
if proxies is None:
return {}
elif isinstance(proxies, (str, URL)):
return {"all": _proxy_from_url(proxies)}
elif isinstance(proxies, AsyncDispatcher):
return {"all": proxies}
else:
new_proxies = {}
for key, dispatcher_or_url in proxies.items():
if isinstance(dispatcher_or_url, (str, URL)):
new_proxies[str(key)] = _proxy_from_url(dispatcher_or_url)
else:
new_proxies[str(key)] = dispatcher_or_url
return new_proxies
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
def _proxy_from_url(url: URLTypes) -> AsyncDispatcher:
nonlocal verify, cert, timeout, http_versions, pool_limits, backend, trust_env
url = URL(url)
if url.scheme in ("http", "https"):
return HTTPProxy(
url,
verify=verify,
cert=cert,
timeout=timeout,
pool_limits=pool_limits,
backend=backend,
trust_env=trust_env,
http_versions=http_versions,
)
raise ValueError(f"Unknown proxy for {url!r}")
|
def _proxy_from_url(url: URLTypes) -> AsyncDispatcher:
url = URL(url)
if url.scheme in ("http", "https"):
return HTTPProxy(url)
raise ValueError(f"Unknown proxy for {url!r}")
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
def __init__(
self,
proxy_url: URLTypes,
*,
proxy_headers: HeaderTypes = None,
proxy_mode: HTTPProxyMode = HTTPProxyMode.DEFAULT,
verify: VerifyTypes = True,
cert: CertTypes = None,
trust_env: bool = None,
timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
http_versions: HTTPVersionTypes = None,
backend: ConcurrencyBackend = None,
):
super(HTTPProxy, self).__init__(
verify=verify,
cert=cert,
timeout=timeout,
pool_limits=pool_limits,
backend=backend,
trust_env=trust_env,
http_versions=http_versions,
)
self.proxy_url = URL(proxy_url)
self.proxy_mode = proxy_mode
self.proxy_headers = Headers(proxy_headers)
url = self.proxy_url
if url.username or url.password:
self.proxy_headers.setdefault(
"Proxy-Authorization",
build_basic_auth_header(url.username, url.password),
)
# Remove userinfo from the URL authority, e.g.:
# 'username:password@proxy_host:proxy_port' -> 'proxy_host:proxy_port'
credentials, _, authority = url.authority.rpartition("@")
self.proxy_url = url.copy_with(authority=authority)
|
def __init__(
self,
proxy_url: URLTypes,
*,
proxy_headers: HeaderTypes = None,
proxy_mode: HTTPProxyMode = HTTPProxyMode.DEFAULT,
verify: VerifyTypes = True,
cert: CertTypes = None,
timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
backend: ConcurrencyBackend = None,
):
super(HTTPProxy, self).__init__(
verify=verify,
cert=cert,
timeout=timeout,
pool_limits=pool_limits,
backend=backend,
)
self.proxy_url = URL(proxy_url)
self.proxy_mode = proxy_mode
self.proxy_headers = Headers(proxy_headers)
url = self.proxy_url
if url.username or url.password:
self.proxy_headers.setdefault(
"Proxy-Authorization",
build_basic_auth_header(url.username, url.password),
)
# Remove userinfo from the URL authority, e.g.:
# 'username:password@proxy_host:proxy_port' -> 'proxy_host:proxy_port'
credentials, _, authority = url.authority.rpartition("@")
self.proxy_url = url.copy_with(authority=authority)
|
https://github.com/encode/httpx/issues/376
|
SSL handshake failed on verifying the certificate
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport fd=768 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 625, in _on_handshake_complete
raise handshake_exc
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
SSL error in data received
protocol: <asyncio.sslproto.SSLProtocol object at 0x0000020CDCD7E470>
transport: <_SelectorSocketTransport closing fd=768 read=idle write=<idle, bufsize=0>>
Traceback (most recent call last):
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
Traceback (most recent call last):
File "C:/Users/localhost/PycharmProjects/TribalW_Test_Env/Basic/tests/httpx/proxy_test.py", line 11, in <module>
request = client.get("http://whoer.net",headers=headers,verify=client_cer)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 818, in get
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 729, in request
proxies=proxies,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 760, in send
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 241, in run
return self.loop.run_until_complete(coroutine(*args, **kwargs))
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 230, in _get_response
return await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 41, in __call__
return await self(next_request, get_response)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\middleware\redirect.py", line 31, in __call__
response = await get_response(request)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\client.py", line 191, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 243, in send
request=request, verify=verify, cert=cert, timeout=timeout
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\connection_pool.py", line 118, in send
connection = await self.acquire_connection(origin=request.url.origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 92, in acquire_connection
return await self.tunnel_connection(origin)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 110, in tunnel_connection
await self.tunnel_start_tls(origin, connection)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\dispatch\proxy_http.py", line 194, in tunnel_start_tls
timeout=timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\site-packages\httpx\concurrency\asyncio.py", line 216, in start_tls
timeout=timeout.connect_timeout,
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\tasks.py", line 416, in wait_for
return fut.result()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\base_events.py", line 1134, in start_tls
await waiter
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 526, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\asyncio\sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "C:\Users\localhost\AppData\Local\Programs\Python\Python37\lib\ssl.py", line 763, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self signed certificate in certificate chain (_ssl.c:1056)
|
ssl.SSLCertVerificationError
|
def __init__(
self,
stream_reader: asyncio.StreamReader,
stream_writer: asyncio.StreamWriter,
timeout: TimeoutConfig,
):
self.stream_reader = stream_reader
self.stream_writer = stream_writer
self.timeout = timeout
self.read_lock = asyncio.Lock()
self._inner: typing.Optional[SocketStream] = None
|
def __init__(
self,
stream_reader: asyncio.StreamReader,
stream_writer: asyncio.StreamWriter,
timeout: TimeoutConfig,
):
self.stream_reader = stream_reader
self.stream_writer = stream_writer
self.timeout = timeout
self._inner: typing.Optional[SocketStream] = None
|
https://github.com/encode/httpx/issues/527
|
Traceback (most recent call last):
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 63, in <module>
asyncio.get_event_loop().run_until_complete(cl.embedd_batch())
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 18, in embedd_batch
embeddings = await asyncio.gather(*requests)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 41, in _send_to_server
emb = await self._send_request(client, im, url)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 56, in _send_request
data=image
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 484, in post
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 626, in request
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 650, in send
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 265, in _get_response
return await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/middleware/redirect.py", line 31, in __call__
response = await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 226, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection.py", line 62, in send
response = await self.h2_connection.send(request, timeout=timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 52, in send
status_code, headers = await self.receive_response(stream_id, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 171, in receive_response
event = await self.receive_event(stream_id, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 206, in receive_event
data = await self.stream.read(self.READ_NUM_BYTES, timeout, flag=flag)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/concurrency/asyncio.py", line 114, in read
data = await asyncio.wait_for(self.stream_reader.read(n), read_timeout)
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 640, in read
await self._wait_for_data('read')
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 460, in _wait_for_data
f'{func_name}() called while another coroutine is '
RuntimeError: read() called while another coroutine is already waiting for incoming data
|
RuntimeError
|
async def read(
self, n: int, timeout: TimeoutConfig = None, flag: TimeoutFlag = None
) -> bytes:
if timeout is None:
timeout = self.timeout
while True:
# Check our flag at the first possible moment, and use a fine
# grained retry loop if we're not yet in read-timeout mode.
should_raise = flag is None or flag.raise_on_read_timeout
read_timeout = timeout.read_timeout if should_raise else 0.01
try:
async with self.read_lock:
data = await asyncio.wait_for(self.stream_reader.read(n), read_timeout)
except asyncio.TimeoutError:
if should_raise:
raise ReadTimeout() from None
# FIX(py3.6): yield control back to the event loop to give it a chance
# to cancel `.read(n)` before we retry.
# This prevents concurrent `.read()` calls, which asyncio
# doesn't seem to allow on 3.6.
# See: https://github.com/encode/httpx/issues/382
await asyncio.sleep(0)
else:
break
return data
|
async def read(
self, n: int, timeout: TimeoutConfig = None, flag: TimeoutFlag = None
) -> bytes:
if timeout is None:
timeout = self.timeout
while True:
# Check our flag at the first possible moment, and use a fine
# grained retry loop if we're not yet in read-timeout mode.
should_raise = flag is None or flag.raise_on_read_timeout
read_timeout = timeout.read_timeout if should_raise else 0.01
try:
data = await asyncio.wait_for(self.stream_reader.read(n), read_timeout)
break
except asyncio.TimeoutError:
if should_raise:
raise ReadTimeout() from None
# FIX(py3.6): yield control back to the event loop to give it a chance
# to cancel `.read(n)` before we retry.
# This prevents concurrent `.read()` calls, which asyncio
# doesn't seem to allow on 3.6.
# See: https://github.com/encode/httpx/issues/382
await asyncio.sleep(0)
return data
|
https://github.com/encode/httpx/issues/527
|
Traceback (most recent call last):
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 63, in <module>
asyncio.get_event_loop().run_until_complete(cl.embedd_batch())
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 18, in embedd_batch
embeddings = await asyncio.gather(*requests)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 41, in _send_to_server
emb = await self._send_request(client, im, url)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 56, in _send_request
data=image
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 484, in post
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 626, in request
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 650, in send
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 265, in _get_response
return await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/middleware/redirect.py", line 31, in __call__
response = await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 226, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection.py", line 62, in send
response = await self.h2_connection.send(request, timeout=timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 52, in send
status_code, headers = await self.receive_response(stream_id, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 171, in receive_response
event = await self.receive_event(stream_id, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 206, in receive_event
data = await self.stream.read(self.READ_NUM_BYTES, timeout, flag=flag)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/concurrency/asyncio.py", line 114, in read
data = await asyncio.wait_for(self.stream_reader.read(n), read_timeout)
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 640, in read
await self._wait_for_data('read')
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 460, in _wait_for_data
f'{func_name}() called while another coroutine is '
RuntimeError: read() called while another coroutine is already waiting for incoming data
|
RuntimeError
|
def __init__(
self,
stream: typing.Union[trio.SocketStream, trio.SSLStream],
timeout: TimeoutConfig,
) -> None:
self.stream = stream
self.timeout = timeout
self.write_buffer = b""
self.read_lock = trio.Lock()
self.write_lock = trio.Lock()
|
def __init__(
self,
stream: typing.Union[trio.SocketStream, trio.SSLStream],
timeout: TimeoutConfig,
) -> None:
self.stream = stream
self.timeout = timeout
self.write_buffer = b""
self.write_lock = trio.Lock()
|
https://github.com/encode/httpx/issues/527
|
Traceback (most recent call last):
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 63, in <module>
asyncio.get_event_loop().run_until_complete(cl.embedd_batch())
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 18, in embedd_batch
embeddings = await asyncio.gather(*requests)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 41, in _send_to_server
emb = await self._send_request(client, im, url)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 56, in _send_request
data=image
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 484, in post
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 626, in request
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 650, in send
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 265, in _get_response
return await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/middleware/redirect.py", line 31, in __call__
response = await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 226, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection.py", line 62, in send
response = await self.h2_connection.send(request, timeout=timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 52, in send
status_code, headers = await self.receive_response(stream_id, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 171, in receive_response
event = await self.receive_event(stream_id, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 206, in receive_event
data = await self.stream.read(self.READ_NUM_BYTES, timeout, flag=flag)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/concurrency/asyncio.py", line 114, in read
data = await asyncio.wait_for(self.stream_reader.read(n), read_timeout)
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 640, in read
await self._wait_for_data('read')
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 460, in _wait_for_data
f'{func_name}() called while another coroutine is '
RuntimeError: read() called while another coroutine is already waiting for incoming data
|
RuntimeError
|
async def read(
self, n: int, timeout: TimeoutConfig = None, flag: TimeoutFlag = None
) -> bytes:
if timeout is None:
timeout = self.timeout
while True:
# Check our flag at the first possible moment, and use a fine
# grained retry loop if we're not yet in read-timeout mode.
should_raise = flag is None or flag.raise_on_read_timeout
read_timeout = _or_inf(timeout.read_timeout if should_raise else 0.01)
with trio.move_on_after(read_timeout):
async with self.read_lock:
return await self.stream.receive_some(max_bytes=n)
if should_raise:
raise ReadTimeout() from None
|
async def read(
self, n: int, timeout: TimeoutConfig = None, flag: TimeoutFlag = None
) -> bytes:
if timeout is None:
timeout = self.timeout
while True:
# Check our flag at the first possible moment, and use a fine
# grained retry loop if we're not yet in read-timeout mode.
should_raise = flag is None or flag.raise_on_read_timeout
read_timeout = _or_inf(timeout.read_timeout if should_raise else 0.01)
with trio.move_on_after(read_timeout):
return await self.stream.receive_some(max_bytes=n)
if should_raise:
raise ReadTimeout() from None
|
https://github.com/encode/httpx/issues/527
|
Traceback (most recent call last):
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 63, in <module>
asyncio.get_event_loop().run_until_complete(cl.embedd_batch())
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 18, in embedd_batch
embeddings = await asyncio.gather(*requests)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 41, in _send_to_server
emb = await self._send_request(client, im, url)
File "/Users/primoz/PycharmProjects/orange3-imageanalytics/example.py", line 56, in _send_request
data=image
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 484, in post
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 626, in request
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 650, in send
trust_env=trust_env,
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 265, in _get_response
return await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/middleware/redirect.py", line 31, in __call__
response = await get_response(request)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/client.py", line 226, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/connection.py", line 62, in send
response = await self.h2_connection.send(request, timeout=timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 52, in send
status_code, headers = await self.receive_response(stream_id, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 171, in receive_response
event = await self.receive_event(stream_id, timeout)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/dispatch/http2.py", line 206, in receive_event
data = await self.stream.read(self.READ_NUM_BYTES, timeout, flag=flag)
File "/Users/primoz/venv/orange/lib/python3.7/site-packages/httpx/concurrency/asyncio.py", line 114, in read
data = await asyncio.wait_for(self.stream_reader.read(n), read_timeout)
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 640, in read
await self._wait_for_data('read')
File "/Users/primoz/miniconda3/lib/python3.7/asyncio/streams.py", line 460, in _wait_for_data
f'{func_name}() called while another coroutine is '
RuntimeError: read() called while another coroutine is already waiting for incoming data
|
RuntimeError
|
def build_redirect_request(
self, request: AsyncRequest, response: AsyncResponse
) -> AsyncRequest:
method = self.redirect_method(request, response)
url = self.redirect_url(request, response)
headers = self.redirect_headers(request, url, method) # TODO: merge headers?
content = self.redirect_content(request, method)
cookies = Cookies(self.cookies)
cookies.update(request.cookies)
return AsyncRequest(
method=method, url=url, headers=headers, data=content, cookies=cookies
)
|
def build_redirect_request(
self, request: AsyncRequest, response: AsyncResponse
) -> AsyncRequest:
method = self.redirect_method(request, response)
url = self.redirect_url(request, response)
headers = self.redirect_headers(request, url) # TODO: merge headers?
content = self.redirect_content(request, method)
cookies = Cookies(self.cookies)
cookies.update(request.cookies)
return AsyncRequest(
method=method, url=url, headers=headers, data=content, cookies=cookies
)
|
https://github.com/encode/httpx/issues/309
|
DEBUG:asyncio:Using selector: KqueueSelector
DEBUG:httpx.dispatch.connection_pool:new_connection connection=HTTPConnection(origin=Origin(scheme='http' host='127.0.0.1' port=8000))
DEBUG:httpx.dispatch.connection:start_connect host='127.0.0.1' port=8000 timeout=TimeoutConfig(timeout=5.0)
DEBUG:httpx.dispatch.connection:connected http_version='HTTP/1.1'
DEBUG:httpx.dispatch.http11:send_headers method='POST' target='/debug' headers=Headers({'host': '127.0.0.1:8000', 'user-agent': 'python-httpx/0.7.2', 'accept': '*/*', 'content-length': '5', 'accept-encoding': 'gzip, deflate', 'connection': 'keep-alive'})
DEBUG:httpx.dispatch.http11:receive_event event=NEED_DATA
DEBUG:httpx.dispatch.http11:send_data data=Data(<5 bytes>)
DEBUG:httpx.dispatch.http11:receive_event event=Response(status_code=302, headers=[(b'date', b'Tue, 03 Sep 2019 03:50:53 GMT'), (b'server', b'uvicorn'), (b'location', b'https://httpbin.org/headers'), (b'transfer-encoding', b'chunked')], http_version=b'1.1', reason=b'Found')
DEBUG:httpx.dispatch.http11:receive_event event=EndOfMessage(headers=[])
DEBUG:httpx.dispatch.http11:response_closed our_state=DONE their_state=DONE
DEBUG:httpx.dispatch.connection_pool:release_connection connection=HTTPConnection(origin=Origin(scheme='http' host='127.0.0.1' port=8000))
DEBUG:httpx.dispatch.connection_pool:new_connection connection=HTTPConnection(origin=Origin(scheme='https' host='httpbin.org' port=443))
DEBUG:httpx.dispatch.connection:start_connect host='httpbin.org' port=443 timeout=TimeoutConfig(timeout=5.0)
DEBUG:httpx.dispatch.connection:connected http_version='HTTP/1.1'
DEBUG:httpx.dispatch.http11:send_headers method='GET' target='/headers' headers=Headers({'host': 'httpbin.org', 'user-agent': 'python-httpx/0.7.2', 'accept': '*/*', 'content-length': '5', 'accept-encoding': 'gzip, deflate', 'connection': 'keep-alive'})
DEBUG:httpx.dispatch.http11:receive_event event=NEED_DATA
Traceback (most recent call last):
File "http3/httpx/dispatch/http11.py", line 53, in send
http_version, status_code, headers = await self._receive_response(timeout)
File "http3/httpx/dispatch/http11.py", line 133, in _receive_response
event = await self._receive_event(timeout)
File "http3/httpx/dispatch/http11.py", line 174, in _receive_event
self.READ_NUM_BYTES, timeout, flag=self.timeout_flag
File "http3/httpx/concurrency/asyncio.py", line 92, in read
raise ReadTimeout() from None
http3.httpx.exceptions.ReadTimeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "test_proto.py", line 16, in <module>
asyncio.get_event_loop().run_until_complete(test())
File "/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "test_proto.py", line 11, in test
resp = await client.post(url, data='debug', allow_redirects=True)
File "http3/httpx/client.py", line 415, in post
trust_env=trust_env,
File "http3/httpx/client.py", line 566, in request
trust_env=trust_env,
File "http3/httpx/client.py", line 237, in send
return await get_response(request)
File "http3/httpx/middleware.py", line 72, in __call__
return await self(next_request, get_response)
File "http3/httpx/middleware.py", line 62, in __call__
response = await get_response(request)
File "http3/httpx/client.py", line 202, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "http3/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "http3/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "http3/httpx/dispatch/connection.py", line 65, in send
response = await self.h11_connection.send(request, timeout=timeout)
File "http3/httpx/dispatch/http11.py", line 53, in send
http_version, status_code, headers = await self._receive_response(timeout)
File "http3/httpx/concurrency/asyncio.py", line 285, in __aexit__
await self.task
File "http3/httpx/dispatch/http11.py", line 108, in _send_request_data
await self._send_event(event, timeout)
File "http3/httpx/dispatch/http11.py", line 123, in _send_event
bytes_to_send = self.h11_state.send(event)
File "site-packages/h11/_connection.py", line 469, in send
data_list = self.send_with_data_passthrough(event)
File "site-packages/h11/_connection.py", line 502, in send_with_data_passthrough
writer(event, data_list.append)
File "site-packages/h11/_writers.py", line 79, in __call__
self.send_eom(event.headers, write)
File "site-packages/h11/_writers.py", line 102, in send_eom
raise LocalProtocolError("Too little data for declared Content-Length")
h11._util.LocalProtocolError: Too little data for declared Content-Length
|
h11._util.LocalProtocolError
|
def redirect_headers(self, request: AsyncRequest, url: URL, method: str) -> Headers:
"""
Return the headers that should be used for the redirect request.
"""
headers = Headers(request.headers)
if url.origin != request.url.origin:
# Strip Authorization headers when responses are redirected away from
# the origin.
del headers["Authorization"]
del headers["Host"]
if method != request.method and method == "GET":
# If we've switch to a 'GET' request, then strip any headers which
# are only relevant to the request body.
del headers["Content-Length"]
del headers["Transfer-Encoding"]
return headers
|
def redirect_headers(self, request: AsyncRequest, url: URL) -> Headers:
"""
Strip Authorization headers when responses are redirected away from
the origin.
"""
headers = Headers(request.headers)
if url.origin != request.url.origin:
del headers["Authorization"]
del headers["host"]
return headers
|
https://github.com/encode/httpx/issues/309
|
DEBUG:asyncio:Using selector: KqueueSelector
DEBUG:httpx.dispatch.connection_pool:new_connection connection=HTTPConnection(origin=Origin(scheme='http' host='127.0.0.1' port=8000))
DEBUG:httpx.dispatch.connection:start_connect host='127.0.0.1' port=8000 timeout=TimeoutConfig(timeout=5.0)
DEBUG:httpx.dispatch.connection:connected http_version='HTTP/1.1'
DEBUG:httpx.dispatch.http11:send_headers method='POST' target='/debug' headers=Headers({'host': '127.0.0.1:8000', 'user-agent': 'python-httpx/0.7.2', 'accept': '*/*', 'content-length': '5', 'accept-encoding': 'gzip, deflate', 'connection': 'keep-alive'})
DEBUG:httpx.dispatch.http11:receive_event event=NEED_DATA
DEBUG:httpx.dispatch.http11:send_data data=Data(<5 bytes>)
DEBUG:httpx.dispatch.http11:receive_event event=Response(status_code=302, headers=[(b'date', b'Tue, 03 Sep 2019 03:50:53 GMT'), (b'server', b'uvicorn'), (b'location', b'https://httpbin.org/headers'), (b'transfer-encoding', b'chunked')], http_version=b'1.1', reason=b'Found')
DEBUG:httpx.dispatch.http11:receive_event event=EndOfMessage(headers=[])
DEBUG:httpx.dispatch.http11:response_closed our_state=DONE their_state=DONE
DEBUG:httpx.dispatch.connection_pool:release_connection connection=HTTPConnection(origin=Origin(scheme='http' host='127.0.0.1' port=8000))
DEBUG:httpx.dispatch.connection_pool:new_connection connection=HTTPConnection(origin=Origin(scheme='https' host='httpbin.org' port=443))
DEBUG:httpx.dispatch.connection:start_connect host='httpbin.org' port=443 timeout=TimeoutConfig(timeout=5.0)
DEBUG:httpx.dispatch.connection:connected http_version='HTTP/1.1'
DEBUG:httpx.dispatch.http11:send_headers method='GET' target='/headers' headers=Headers({'host': 'httpbin.org', 'user-agent': 'python-httpx/0.7.2', 'accept': '*/*', 'content-length': '5', 'accept-encoding': 'gzip, deflate', 'connection': 'keep-alive'})
DEBUG:httpx.dispatch.http11:receive_event event=NEED_DATA
Traceback (most recent call last):
File "http3/httpx/dispatch/http11.py", line 53, in send
http_version, status_code, headers = await self._receive_response(timeout)
File "http3/httpx/dispatch/http11.py", line 133, in _receive_response
event = await self._receive_event(timeout)
File "http3/httpx/dispatch/http11.py", line 174, in _receive_event
self.READ_NUM_BYTES, timeout, flag=self.timeout_flag
File "http3/httpx/concurrency/asyncio.py", line 92, in read
raise ReadTimeout() from None
http3.httpx.exceptions.ReadTimeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "test_proto.py", line 16, in <module>
asyncio.get_event_loop().run_until_complete(test())
File "/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "test_proto.py", line 11, in test
resp = await client.post(url, data='debug', allow_redirects=True)
File "http3/httpx/client.py", line 415, in post
trust_env=trust_env,
File "http3/httpx/client.py", line 566, in request
trust_env=trust_env,
File "http3/httpx/client.py", line 237, in send
return await get_response(request)
File "http3/httpx/middleware.py", line 72, in __call__
return await self(next_request, get_response)
File "http3/httpx/middleware.py", line 62, in __call__
response = await get_response(request)
File "http3/httpx/client.py", line 202, in get_response
request, verify=verify, cert=cert, timeout=timeout
File "http3/httpx/dispatch/connection_pool.py", line 126, in send
raise exc
File "http3/httpx/dispatch/connection_pool.py", line 121, in send
request, verify=verify, cert=cert, timeout=timeout
File "http3/httpx/dispatch/connection.py", line 65, in send
response = await self.h11_connection.send(request, timeout=timeout)
File "http3/httpx/dispatch/http11.py", line 53, in send
http_version, status_code, headers = await self._receive_response(timeout)
File "http3/httpx/concurrency/asyncio.py", line 285, in __aexit__
await self.task
File "http3/httpx/dispatch/http11.py", line 108, in _send_request_data
await self._send_event(event, timeout)
File "http3/httpx/dispatch/http11.py", line 123, in _send_event
bytes_to_send = self.h11_state.send(event)
File "site-packages/h11/_connection.py", line 469, in send
data_list = self.send_with_data_passthrough(event)
File "site-packages/h11/_connection.py", line 502, in send_with_data_passthrough
writer(event, data_list.append)
File "site-packages/h11/_writers.py", line 79, in __call__
self.send_eom(event.headers, write)
File "site-packages/h11/_writers.py", line 102, in send_eom
raise LocalProtocolError("Too little data for declared Content-Length")
h11._util.LocalProtocolError: Too little data for declared Content-Length
|
h11._util.LocalProtocolError
|
def import_user_module(args):
module_path = getattr(args, "user_dir", None)
if module_path is not None:
module_path = os.path.abspath(args.user_dir)
if not os.path.exists(module_path) and not os.path.isfile(
os.path.dirname(module_path)
):
fairseq_rel_path = os.path.join(os.path.dirname(__file__), args.user_dir)
if os.path.exists(fairseq_rel_path):
module_path = fairseq_rel_path
else:
fairseq_rel_path = os.path.join(
os.path.dirname(__file__), "..", args.user_dir
)
if os.path.exists(fairseq_rel_path):
module_path = fairseq_rel_path
else:
raise FileNotFoundError(module_path)
# ensure that user modules are only imported once
import_user_module.memo = getattr(import_user_module, "memo", set())
if module_path not in import_user_module.memo:
import_user_module.memo.add(module_path)
module_parent, module_name = os.path.split(module_path)
if module_name not in sys.modules:
sys.path.insert(0, module_parent)
importlib.import_module(module_name)
else:
raise ImportError(
"Failed to import --user-dir={} because the corresponding module name "
"({}) is not globally unique. Please rename the directory to "
"something unique and try again.".format(module_path, module_name)
)
|
def import_user_module(args):
module_path = getattr(args, "user_dir", None)
if module_path is not None:
module_path = os.path.abspath(args.user_dir)
if not os.path.exists(module_path):
fairseq_rel_path = os.path.join(os.path.dirname(__file__), args.user_dir)
if os.path.exists(fairseq_rel_path):
module_path = fairseq_rel_path
else:
fairseq_rel_path = os.path.join(
os.path.dirname(__file__), "..", args.user_dir
)
if os.path.exists(fairseq_rel_path):
module_path = fairseq_rel_path
else:
raise FileNotFoundError(module_path)
# ensure that user modules are only imported once
import_user_module.memo = getattr(import_user_module, "memo", set())
if module_path not in import_user_module.memo:
import_user_module.memo.add(module_path)
module_parent, module_name = os.path.split(module_path)
if module_name not in sys.modules:
sys.path.insert(0, module_parent)
importlib.import_module(module_name)
else:
raise ImportError(
"Failed to import --user-dir={} because the corresponding module name "
"({}) is not globally unique. Please rename the directory to "
"something unique and try again.".format(module_path, module_name)
)
|
https://github.com/pytorch/fairseq/issues/3017
|
Traceback (most recent call last):
File "/home/ubuntu/venv/bin/fairseq-preprocess", line 8, in <module>
sys.exit(cli_main())
File "/home/ubuntu/venv/lib/python3.6/site-packages/fairseq_cli/preprocess.py", line 392, in cli_main
parser = options.get_preprocessing_parser()
File "/home/ubuntu/venv/lib/python3.6/site-packages/fairseq/options.py", line 28, in get_preprocessing_parser
parser = get_parser("Preprocessing", default_task)
File "/home/ubuntu/venv/lib/python3.6/site-packages/fairseq/options.py", line 210, in get_parser
utils.import_user_module(usr_args)
File "/home/ubuntu/venv/lib/python3.6/site-packages/fairseq/utils.py", line 447, in import_user_module
raise FileNotFoundError(module_path)
FileNotFoundError: /home/ubuntu/my-custom-model.zip/myexample
|
FileNotFoundError
|
def reduce_metrics(logging_outputs) -> None:
"""Aggregate logging outputs from data parallel training."""
loss_sum = sum(log.get("loss", 0) for log in logging_outputs)
ntokens = sum(log.get("ntokens", 0) for log in logging_outputs)
sample_size = sum(log.get("sample_size", 0) for log in logging_outputs)
# we divide by log(2) to convert the loss from base e to base 2
metrics.log_scalar(
"loss", loss_sum / sample_size / math.log(2), sample_size, round=3
)
if sample_size != ntokens:
metrics.log_scalar(
"nll_loss", loss_sum / ntokens / math.log(2), ntokens, round=3
)
metrics.log_derived(
"ppl", lambda meters: utils.get_perplexity(meters["nll_loss"].avg)
)
else:
metrics.log_derived(
"ppl", lambda meters: utils.get_perplexity(meters["loss"].avg)
)
|
def reduce_metrics(logging_outputs) -> None:
"""Aggregate logging outputs from data parallel training."""
loss_sum = sum(log.get("loss", 0) for log in logging_outputs)
ntokens = sum(log.get("ntokens", 0) for log in logging_outputs)
sample_size = sum(log.get("sample_size", 0) for log in logging_outputs)
metrics.log_scalar(
"loss", loss_sum / sample_size / math.log(2), sample_size, round=3
)
if sample_size != ntokens:
metrics.log_scalar(
"nll_loss", loss_sum / ntokens / math.log(2), ntokens, round=3
)
metrics.log_derived(
"ppl", lambda meters: utils.get_perplexity(meters["nll_loss"].avg)
)
else:
metrics.log_derived(
"ppl", lambda meters: utils.get_perplexity(meters["loss"].avg)
)
|
https://github.com/pytorch/fairseq/issues/2811
|
Traceback (most recent call last):
File "/opt/conda/lib/python3.6/site-packages/torch/multiprocessing/spawn.py", line 20, in _wrap
fn(i, *args)
File "/workspace/fairseq/fairseq/distributed_utils.py", line 283, in distributed_main
main(cfg, **kwargs)
File "/workspace/fairseq/fairseq_cli/train.py", line 124, in main
valid_losses, should_stop = train(cfg, trainer, task, epoch_itr)
File "/opt/conda/lib/python3.6/contextlib.py", line 52, in inner
return func(*args, **kwds)
File "/workspace/fairseq/fairseq_cli/train.py", line 202, in train
log_output = trainer.train_step(samples)
File "/opt/conda/lib/python3.6/contextlib.py", line 52, in inner
return func(*args, **kwds)
File "/workspace/fairseq/fairseq/trainer.py", line 459, in train_step
self.zero_grad()
File "/workspace/fairseq/fairseq/trainer.py", line 783, in zero_grad
self.optimizer.zero_grad()
File "/workspace/fairseq/fairseq/optim/fp16_optimizer.py", line 218, in zero_grad
p32.grad.zero_()
AttributeError: 'NoneType' object has no attribute 'zero_'
|
AttributeError
|
def zero_grad(self):
"""Clears the gradients of all optimized parameters."""
for p in self.fp16_params:
p.grad = None
if self.has_flat_params:
if torch.is_tensor(self.fp32_params):
self.fp32_params.grad.zero_()
elif isinstance(self.fp32_params, dict):
for fp32_params in self.fp32_params.values():
fp32_params.grad.zero_()
else:
raise RuntimeError("self.fp32_params must be a tensor or dict")
else:
for p32 in self.fp32_params:
if p32.grad:
p32.grad.zero_()
self._needs_sync = False
if self.scaler is not None:
self._multiply_factor = 1.0 / float(self.scaler.loss_scale)
|
def zero_grad(self):
"""Clears the gradients of all optimized parameters."""
for p in self.fp16_params:
p.grad = None
if self.has_flat_params:
if torch.is_tensor(self.fp32_params):
self.fp32_params.grad.zero_()
elif isinstance(self.fp32_params, dict):
for fp32_params in self.fp32_params.values():
fp32_params.grad.zero_()
else:
raise RuntimeError("self.fp32_params must be a tensor or dict")
else:
for p32 in self.fp32_params:
p32.grad.zero_()
self._needs_sync = False
if self.scaler is not None:
self._multiply_factor = 1.0 / float(self.scaler.loss_scale)
|
https://github.com/pytorch/fairseq/issues/2811
|
Traceback (most recent call last):
File "/opt/conda/lib/python3.6/site-packages/torch/multiprocessing/spawn.py", line 20, in _wrap
fn(i, *args)
File "/workspace/fairseq/fairseq/distributed_utils.py", line 283, in distributed_main
main(cfg, **kwargs)
File "/workspace/fairseq/fairseq_cli/train.py", line 124, in main
valid_losses, should_stop = train(cfg, trainer, task, epoch_itr)
File "/opt/conda/lib/python3.6/contextlib.py", line 52, in inner
return func(*args, **kwds)
File "/workspace/fairseq/fairseq_cli/train.py", line 202, in train
log_output = trainer.train_step(samples)
File "/opt/conda/lib/python3.6/contextlib.py", line 52, in inner
return func(*args, **kwds)
File "/workspace/fairseq/fairseq/trainer.py", line 459, in train_step
self.zero_grad()
File "/workspace/fairseq/fairseq/trainer.py", line 783, in zero_grad
self.optimizer.zero_grad()
File "/workspace/fairseq/fairseq/optim/fp16_optimizer.py", line 218, in zero_grad
p32.grad.zero_()
AttributeError: 'NoneType' object has no attribute 'zero_'
|
AttributeError
|
def add_args(parser):
"""Add arguments to the parser for this LR scheduler."""
# fmt: off
parser.add_argument('--force-anneal', '--fa', type=int, metavar='N',
help='force annealing at specified epoch (epochs start at 1)')
parser.add_argument('--lr-shrink', default=0.1, type=float, metavar='LS',
help='shrink factor for annealing, lr_new = (lr * lr_shrink)')
parser.add_argument('--warmup-updates', default=0, type=int, metavar='N',
help='warmup the learning rate linearly for the first N updates')
|
def add_args(parser):
"""Add arguments to the parser for this LR scheduler."""
# fmt: off
parser.add_argument('--force-anneal', '--fa', type=int, metavar='N',
help='force annealing at specified epoch')
parser.add_argument('--lr-shrink', default=0.1, type=float, metavar='LS',
help='shrink factor for annealing, lr_new = (lr * lr_shrink)')
parser.add_argument('--warmup-updates', default=0, type=int, metavar='N',
help='warmup the learning rate linearly for the first N updates')
|
https://github.com/pytorch/fairseq/issues/2811
|
Traceback (most recent call last):
File "/opt/conda/lib/python3.6/site-packages/torch/multiprocessing/spawn.py", line 20, in _wrap
fn(i, *args)
File "/workspace/fairseq/fairseq/distributed_utils.py", line 283, in distributed_main
main(cfg, **kwargs)
File "/workspace/fairseq/fairseq_cli/train.py", line 124, in main
valid_losses, should_stop = train(cfg, trainer, task, epoch_itr)
File "/opt/conda/lib/python3.6/contextlib.py", line 52, in inner
return func(*args, **kwds)
File "/workspace/fairseq/fairseq_cli/train.py", line 202, in train
log_output = trainer.train_step(samples)
File "/opt/conda/lib/python3.6/contextlib.py", line 52, in inner
return func(*args, **kwds)
File "/workspace/fairseq/fairseq/trainer.py", line 459, in train_step
self.zero_grad()
File "/workspace/fairseq/fairseq/trainer.py", line 783, in zero_grad
self.optimizer.zero_grad()
File "/workspace/fairseq/fairseq/optim/fp16_optimizer.py", line 218, in zero_grad
p32.grad.zero_()
AttributeError: 'NoneType' object has no attribute 'zero_'
|
AttributeError
|
def get_next_lr(self, epoch):
lrs = self.args.lr
if self.args.force_anneal is None or epoch < self.args.force_anneal:
# use fixed LR schedule
next_lr = lrs[min(epoch - 1, len(lrs) - 1)]
else:
# annneal based on lr_shrink
next_lr = lrs[-1] * self.args.lr_shrink ** (epoch + 1 - self.args.force_anneal)
return next_lr
|
def get_next_lr(self, epoch):
lrs = self.args.lr
if self.args.force_anneal is None or epoch < self.args.force_anneal:
# use fixed LR schedule
next_lr = lrs[min(epoch, len(lrs) - 1)]
else:
# annneal based on lr_shrink
next_lr = lrs[-1] * self.args.lr_shrink ** (epoch + 1 - self.args.force_anneal)
return next_lr
|
https://github.com/pytorch/fairseq/issues/2811
|
Traceback (most recent call last):
File "/opt/conda/lib/python3.6/site-packages/torch/multiprocessing/spawn.py", line 20, in _wrap
fn(i, *args)
File "/workspace/fairseq/fairseq/distributed_utils.py", line 283, in distributed_main
main(cfg, **kwargs)
File "/workspace/fairseq/fairseq_cli/train.py", line 124, in main
valid_losses, should_stop = train(cfg, trainer, task, epoch_itr)
File "/opt/conda/lib/python3.6/contextlib.py", line 52, in inner
return func(*args, **kwds)
File "/workspace/fairseq/fairseq_cli/train.py", line 202, in train
log_output = trainer.train_step(samples)
File "/opt/conda/lib/python3.6/contextlib.py", line 52, in inner
return func(*args, **kwds)
File "/workspace/fairseq/fairseq/trainer.py", line 459, in train_step
self.zero_grad()
File "/workspace/fairseq/fairseq/trainer.py", line 783, in zero_grad
self.optimizer.zero_grad()
File "/workspace/fairseq/fairseq/optim/fp16_optimizer.py", line 218, in zero_grad
p32.grad.zero_()
AttributeError: 'NoneType' object has no attribute 'zero_'
|
AttributeError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.