after_merge | before_merge | url | full_traceback | traceback_type
|---|---|---|---|---|
def plot(result_pickle_file_path, show, plot_save_file):
"""
[sys_analyser] draw result DataFrame
"""
import pandas as pd
from .plot import plot_result
result_dict = pd.read_pickle(result_pickle_file_path)
plot_result(result_dict, show, plot_save_file)
| def plot(result_dict_file, show, plot_save_file):
"""
[sys_analyser] draw result DataFrame
"""
import pandas as pd
from .plot import plot_result
result_dict = pd.read_pickle(result_dict_file)
plot_result(result_dict, show, plot_save_file)
| https://github.com/ricequant/rqalpha/issues/109 | Traceback (most recent call last):
File "c:\programdata\anaconda2\lib\runpy.py", line 174, in _run_module_as_main
"__main__", fname, loader, pkg_name)
File "c:\programdata\anaconda2\lib\runpy.py", line 72, in _run_code
exec code in run_globals
β β {'__builtins__': <module '__builtin__' (built-in)>, '__file__': 'C... | TypeError |
def stream_logs(self):
"""Stream a pod's log."""
for line in self.api.read_namespaced_pod_log(
self.name, self.namespace, follow=True, _preload_content=False
):
# verify that the line is JSON
line = line.decode("utf-8")
try:
json.loads(line)
except ValueEr... | def stream_logs(self):
"""Stream a pod's log."""
for line in self.api.read_namespaced_pod_log(
self.name, self.namespace, follow=True, _preload_content=False
):
self.progress("log", line.decode("utf-8"))
| https://github.com/jupyterhub/binderhub/issues/164 | / # jupyter-repo2docker https://github.com/yuvipanda/example-requirements --json-logs
Traceback (most recent call last):
File "/usr/local/bin/jupyter-repo2docker", line 11, in <module>
load_entry_point('jupyter-repo2docker==0.4.1', 'console_scripts', 'jupyter-repo2docker')()
File "/usr/local/lib/python3.6/site-packages... | FileNotFoundError |
def addRecentProjectFile(self, projectFile):
projectFile = QUrl(projectFile).toLocalFile()
projects = self._recentProjectFiles()
# remove duplicates while preserving order
from collections import OrderedDict
uniqueProjects = OrderedDict.fromkeys(projects)
projects = list(uniqueProjects)
# ... | def addRecentProjectFile(self, projectFile):
projectFile = QUrl(projectFile).path()
projects = self._recentProjectFiles()
# remove duplicates while preserving order
from collections import OrderedDict
uniqueProjects = OrderedDict.fromkeys(projects)
projects = list(uniqueProjects)
# remove ... | https://github.com/alicevision/meshroom/issues/912 | [2020-05-23 16:12:48,660][ERROR] Traceback (most recent call last):
File "D:\Meshroom_Src\meshroom\meshroom\ui\reconstruction.py", line 432, in load
super(Reconstruction, self).load(filepath, setupProjectFile)
File "D:\Meshroom_Src\meshroom\meshroom\ui\graph.py", line 314, in load
g.load(filepath, setupProjectFile)
Fil... | OSError |
def addSfmAugmentation(self, withMVS=False):
"""
Create a new augmentation step connected to the last SfM node of this Reconstruction and
return the created CameraInit and SfM nodes.
If the Reconstruction is not initialized (empty initial CameraInit), this method won't
create anything and return in... | def addSfmAugmentation(self, withMVS=False):
"""
Create a new augmentation step connected to the last SfM node of this Reconstruction and
return the created CameraInit and SfM nodes.
If the Reconstruction is not initialized (empty initial CameraInit), this method won't
create anything and return in... | https://github.com/alicevision/meshroom/issues/127 | Traceback (most recent call last):
File "C:\Users\andre\work\meshroom\meshroom\ui\reconstruction.py", line 72, in start
raise RuntimeError("Invalid folder provided: {}".format(folder))
RuntimeError: Invalid folder provided: /F:/ai-ml-models/images/live | RuntimeError |
def load_pymathics_doc(self):
if self.pymathics_doc_loaded:
return
from mathics.settings import default_pymathics_modules
pymathicspart = None
# Look the "Pymathics Modules" part, and if it does not exist, create it.
for part in self.parts:
if part.title == "Pymathics Modules":
... | def load_pymathics_doc(self):
if self.pymathics_doc_loaded:
return
from mathics.settings import default_pymathics_modules
pymathicspart = None
# Look the "Pymathics Modules" part, and if it does not exist, create it.
for part in self.parts:
if part.title == "Pymathics Modules":
... | https://github.com/mathics/Mathics/issues/906 | $ mathicsserver
warning: database file /home/pablo/.local/var/mathics/mathics.sqlite not found
Migrating database /home/pablo/.local/var/mathics/mathics.sqlite
Traceback (most recent call last):
File "/home/pablo/Documents/Mathics/mathics/manage.py", line 13, in <module>
execute_from_command_line(sys.argv)
File "/home... | KeyError |
def __init__(self, module=None):
self.title = "Overview"
self.parts = []
self.parts_by_slug = {}
self.doc_dir = None
self.xml_data_file = None
self.tex_data_file = None
self.latex_file = None
self.symbols = {}
if module is None:
return
import importlib
# Load the mo... | def __init__(self, module=None):
self.title = "Overview"
self.parts = []
self.parts_by_slug = {}
self.doc_dir = None
self.xml_data_file = None
self.tex_data_file = None
self.latex_file = None
self.symbols = {}
if module is None:
return
import importlib
# Load the mo... | https://github.com/mathics/Mathics/issues/906 | $ mathicsserver
warning: database file /home/pablo/.local/var/mathics/mathics.sqlite not found
Migrating database /home/pablo/.local/var/mathics/mathics.sqlite
Traceback (most recent call last):
File "/home/pablo/Documents/Mathics/mathics/manage.py", line 13, in <module>
execute_from_command_line(sys.argv)
File "/home... | KeyError |
def clear_pymathics_modules(self):
from mathics.builtin import builtins, builtins_by_module
# Remove all modules that are not in mathics
# print("cleaning pymathics modules")
for key in list(builtins_by_module.keys()):
if not key.startswith("mathics."):
print(f'removing module "{key... | def clear_pymathics_modules(self):
from mathics.builtin import builtins, builtins_by_module
# Remove all modules that are not in mathics
# print("cleaning pymathics modules")
for key in list(builtins_by_module.keys()):
if key[:8] != "mathics.":
print("removing module ", key, " not i... | https://github.com/mathics/Mathics/issues/836 | Mathics 1.1.dev0
on CPython 3.6.9 (default, Jul 17 2020, 12:50:27)
using SymPy 1.6.2, mpmath 1.1.0
Copyright (C) 2011-2020 The Mathics Team.
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions.
See the documentation for the full license... | ValueError |
def apply(self, evaluation):
"Exit"
exit()
| def apply(self, evaluation):
"Exit[]"
sys.exit()
| https://github.com/mathics/Mathics/issues/813 | Copyright (C) 2011-2016 The Mathics Team.
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions.
See the documentation for the full license.
Quit by pressing CONTROL-D
In[1]:= Quit[]
Traceback (most recent call last):
File "~/Documents/M... | NameError |
def apply_n(self, n, evaluation):
"Exit[n_Integer]"
exit(n.get_int_value())
| def apply_n(self, n, evaluation):
"Exit[n_Integer]"
sys.exit(n.get_int_value())
| https://github.com/mathics/Mathics/issues/813 | Copyright (C) 2011-2016 The Mathics Team.
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions.
See the documentation for the full license.
Quit by pressing CONTROL-D
In[1]:= Quit[]
Traceback (most recent call last):
File "~/Documents/M... | NameError |
def apply(self, url, elements, evaluation):
"FetchURL[url_String, elements_]"
import tempfile
import os
py_url = url.get_string_value()
temp_handle, temp_path = tempfile.mkstemp(suffix="")
try:
f = urllib2.urlopen(py_url)
try:
if sys.version_info >= (3, 0):
... | def apply(self, url, elements, evaluation):
"FetchURL[url_String, elements_]"
import tempfile
import os
py_url = url.get_string_value()
temp_handle, temp_path = tempfile.mkstemp(suffix="")
try:
with urllib2.urlopen(py_url) as f:
content_type = f.info().get_content_type()
... | https://github.com/mathics/Mathics/issues/562 | In[1]:= Import["https://upload.wikimedia.org/wikipedia/en/2/24/Lenna.png"]
Traceback (most recent call last):
File "/home/angus/venv_pypy/bin/mathics", line 11, in <module>
load_entry_point('Mathics', 'console_scripts', 'mathics')()
File "/home/angus/Mathics/mathics/main.py", line 286, in main
result = evaluation.evalu... | AttributeError |
def _get_system_stats(self):
with ConnectTo(StatisticDbViewer, self._config) as stats_db:
backend_data = stats_db.get_statistic("backend")
try:
return {
"backend_cpu_percentage": "{}%".format(
backend_data["system"]["cpu_percentage"]
),
"number... | def _get_system_stats(self):
with ConnectTo(StatisticDbViewer, self._config) as stats_db:
backend_data = stats_db.get_statistic("backend")
return {
"backend_cpu_percentage": backend_data["system"]["cpu_percentage"],
"number_of_running_analyses": len(backend_data["analysis"]["current_ana... | https://github.com/fkie-cad/FACT_core/issues/448 | [2020-07-07 09:46:38,595] ERROR in app: Exception on /ajax/stats/system [GET]
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/flask/app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "/usr/local/lib/python3.8/dist-packages/flask/app.py", line 1952, in full_dispa... | KeyError |
def _install_css_and_js_files():
with OperateInDirectory("../web_interface/static"):
os.makedirs("web_css", exist_ok=True)
os.makedirs("web_js", exist_ok=True)
wget_static_web_content(
"https://github.com/vakata/jstree/zipball/3.3.9",
".",
[
... | def _install_css_and_js_files():
with OperateInDirectory("../web_interface/static"):
os.makedirs("web_css", exist_ok=True)
os.makedirs("web_js", exist_ok=True)
wget_static_web_content(
"https://github.com/vakata/jstree/zipball/3.3.9",
".",
["unzip 3.3.9",... | https://github.com/fkie-cad/FACT_core/issues/392 | [2020-04-16 10:42:50][frontend][INFO]: Install static jstree content
Traceback (most recent call last):
File "src/install.py", line 173, in <module>
install()
File "src/install.py", line 157, in install
frontend(not args.no_radare, args.nginx)
File "/home/weidenba/FACT_core/src/install/frontend.py", line 165, in main
_... | helperFunctions.install.InstallationError |
def get_stats_pie(self, result, stats):
pie_invalid, pie_off, pie_on, pie_partial = self.extract_pie_data_from_analysis(
result
)
total_amount_of_files = self.calculate_total_files_for_pie(
[pie_off, pie_on, pie_partial, pie_invalid]
)
self.append_pie_stats_to_result_dict(
pi... | def get_stats_pie(self, result, stats):
pie_invalid, pie_off, pie_on, pie_partial = self.extract_pie_data_from_analysis(
result
)
total_amount_of_files = self.calculate_total_files_for_pie(
pie_off, pie_on, pie_partial, pie_invalid
)
self.append_pie_stats_to_result_dict(
pie_... | https://github.com/fkie-cad/FACT_core/issues/88 | [2018-03-28 13:02:04][update_statistic][INFO]: Try to start Mongo Server...
[2018-03-28 13:02:04][MongoMgr][INFO]: start local mongo database
Traceback (most recent call last):
File "src/update_statistic.py", line 48, in <module>
sys.exit(main())
File "src/update_statistic.py", line 38, in main
updater.update_all_stats... | IndexError |
def calculate_total_files_for_pie(pie_stats):
total_amount_of_files = 0
for item in pie_stats:
with suppress(IndexError):
total_amount_of_files += item[0][1]
return total_amount_of_files
| def calculate_total_files_for_pie(pie_off, pie_on, pie_partial, pie_invalid):
if (
len(pie_on) > 0
or len(pie_off) > 0
or len(pie_partial) > 0
or len(pie_invalid) > 0
):
total_amount_of_files = (
pie_on[0][1] + pie_off[0][1] + pie_partial[0][1] + pie_invalid[0... | https://github.com/fkie-cad/FACT_core/issues/88 | [2018-03-28 13:02:04][update_statistic][INFO]: Try to start Mongo Server...
[2018-03-28 13:02:04][MongoMgr][INFO]: start local mongo database
Traceback (most recent call last):
File "src/update_statistic.py", line 48, in <module>
sys.exit(main())
File "src/update_statistic.py", line 38, in main
updater.update_all_stats... | IndexError |
def __init__(
self, X, sensitive_features, y, estimator, constraints, eps, B, opt_lambda=True
):
self.X = X
self.constraints = constraints
self.constraints.load_data(X, y, sensitive_features=sensitive_features)
self.obj = self.constraints.default_objective()
self.obj.load_data(X, y, sensitive_fe... | def __init__(
self, X, sensitive_features, y, estimator, constraints, eps, B, opt_lambda=True
):
self.X = X
self.constraints = constraints
self.constraints.load_data(X, y, sensitive_features=sensitive_features)
self.obj = self.constraints.default_objective()
self.obj.load_data(X, y, sensitive_fe... | https://github.com/fairlearn/fairlearn/issues/395 | from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueE... | ValueError |
def _call_oracle(self, lambda_vec):
signed_weights = self.obj.signed_weights() + self.constraints.signed_weights(
lambda_vec
)
redY = 1 * (signed_weights > 0)
redW = signed_weights.abs()
redW = self.n * redW / redW.sum()
redY_unique = np.unique(redY)
classifier = None
if len(re... | def _call_oracle(self, lambda_vec):
signed_weights = self.obj.signed_weights() + self.constraints.signed_weights(
lambda_vec
)
redY = 1 * (signed_weights > 0)
redW = signed_weights.abs()
redW = self.n * redW / redW.sum()
classifier = pickle.loads(self.pickled_estimator)
oracle_call_... | https://github.com/fairlearn/fairlearn/issues/395 | from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueE... | ValueError |
def __init__(self, estimator, constraints, eps=0.01, T=50, nu=None, eta_mul=2.0): # noqa: D103
self._estimator = estimator
self._constraints = constraints
self._eps = eps
self._T = T
self._nu = nu
self._eta_mul = eta_mul
self._best_gap = None
self._predictors = None
self._weights =... | def __init__(self, estimator, constraints, eps=0.01, T=50, nu=None, eta_mul=2.0): # noqa: D103
self._estimator = estimator
self._constraints = constraints
self._eps = eps
self._T = T
self._nu = nu
self._eta_mul = eta_mul
self._best_gap = None
self._predictors = None
self._weights =... | https://github.com/fairlearn/fairlearn/issues/395 | from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueE... | ValueError |
def fit(self, X, y, **kwargs):
"""Return a fair classifier under specified fairness constraints.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.ndarray, pandas.DataFrame, pandas.Series, or list
"""
_, y_train, sensitive_fea... | def fit(self, X, y, **kwargs):
"""Return a fair classifier under specified fairness constraints.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.ndarray, pandas.DataFrame, pandas.Series, or list
"""
_, y_train, sensitive_fea... | https://github.com/fairlearn/fairlearn/issues/395 | from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueE... | ValueError |
def fit(self, X, y, **kwargs):
"""Run the grid search.
This will result in multiple copies of the
estimator being made, and the :code:`fit(X)` method
of each one called.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.n... | def fit(self, X, y, **kwargs):
"""Run the grid search.
This will result in multiple copies of the
estimator being made, and the :code:`fit(X)` method
of each one called.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.n... | https://github.com/fairlearn/fairlearn/issues/395 | from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueE... | ValueError |
def check_value_shape(self, value, slice_):
"""Checks if value can be set to the slice"""
if None not in self.shape and self.dtype != "O":
if not all([isinstance(sh, int) for sh in slice_]):
expected_value_shape = tuple(
[
len(range(*slice_shape.indices(se... | def check_value_shape(self, value, slice_):
"""Checks if value can be set to the slice"""
if None not in self.shape and self.dtype != "O":
if not all([isinstance(sh, int) for sh in slice_]):
expected_value_shape = tuple(
[
len(range(*slice_shape.indices(se... | https://github.com/activeloopai/Hub/issues/316 | Traceback (most recent call last):
File "examples/upload_mpi.py", line 52, in <module>
res_ds = out_ds.store(tag)
File "/Hub/hub/compute/transform.py", line 372, in store
n_results = self.store_shard(ds_in_shard, ds_out, start, token=token)
File "/Hub/hub/compute/transform.py", line 288, in store_shard
self.upload(
Fil... | AttributeError |
def __init__(
self,
url: str,
mode: str = "a",
shape=None,
schema=None,
token=None,
fs=None,
fs_map=None,
cache: int = defaults.DEFAULT_MEMORY_CACHE_SIZE,
storage_cache: int = defaults.DEFAULT_STORAGE_CACHE_SIZE,
lock_cache=True,
tokenizer=None,
):
"""| Open a new or ... | def __init__(
self,
url: str,
mode: str = "a",
safe_mode: bool = False,
shape=None,
schema=None,
token=None,
fs=None,
fs_map=None,
cache: int = 2**26,
storage_cache: int = 2**28,
lock_cache=True,
tokenizer=None,
):
"""| Open a new or existing dataset for read/writ... | https://github.com/activeloopai/Hub/issues/318 | Traceback (most recent call last):
File "examples/load.py", line 7, in <module>
ds = hub.load(path)
File "/Users/davitb/Git/Hub/hub/__init__.py", line 54, in load
return Dataset(tag)
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 141, in __init__
raise ShapeArgumentNotFoundException()
hub.exceptions.ShapeArgumen... | hub.exceptions.ShapeArgumentNotFoundException |
def _check_and_prepare_dir(self):
"""
Checks if input data is ok.
Creates or overwrites dataset folder.
Returns True dataset needs to be created opposed to read.
"""
fs, path, mode = self._fs, self._path, self._mode
if path.startswith("s3://"):
with open(posixpath.expanduser("~/.acti... | def _check_and_prepare_dir(self):
"""
Checks if input data is ok.
Creates or overwrites dataset folder.
Returns True dataset needs to be created opposed to read.
"""
fs, path, mode = self._fs, self._path, self.mode
if path.startswith("s3://"):
with open(posixpath.expanduser("~/.activ... | https://github.com/activeloopai/Hub/issues/318 | Traceback (most recent call last):
File "examples/load.py", line 7, in <module>
ds = hub.load(path)
File "/Users/davitb/Git/Hub/hub/__init__.py", line 54, in load
return Dataset(tag)
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 141, in __init__
raise ShapeArgumentNotFoundException()
hub.exceptions.ShapeArgumen... | hub.exceptions.ShapeArgumentNotFoundException |
def resize_shape(self, size: int) -> None:
"""Resize the shape of the dataset by resizing each tensor first dimension"""
if size == self.shape[0]:
return
self._shape = (int(size),)
self.meta = self._store_meta()
for t in self._tensors.values():
t.resize_shape(int(size))
self._u... | def resize_shape(self, size: int) -> None:
"""Resize the shape of the dataset by resizing each tensor first dimension"""
if size == self.shape[0]:
return
self.shape = (int(size),)
self.meta = self._store_meta()
for t in self._tensors.values():
t.resize_shape(int(size))
self._up... | https://github.com/activeloopai/Hub/issues/318 | Traceback (most recent call last):
File "examples/load.py", line 7, in <module>
ds = hub.load(path)
File "/Users/davitb/Git/Hub/hub/__init__.py", line 54, in load
return Dataset(tag)
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 141, in __init__
raise ShapeArgumentNotFoundException()
hub.exceptions.ShapeArgumen... | hub.exceptions.ShapeArgumentNotFoundException |
def _get_max_shape(self, shape, max_shape):
if max_shape is None:
return tuple([s or self._int32max for s in shape])
elif isinstance(max_shape, int):
assert max_shape == shape[0]
return self._get_max_shape(shape, None)
else:
max_shape = tuple(max_shape)
assert len(sha... | def _get_max_shape(self, shape, max_shape):
if max_shape is None:
return tuple([s or self._int32max for s in shape])
elif isinstance(max_shape, int):
assert max_shape == shape[0]
return self._get_max_shape(shape, None)
else:
max_shape = tuple(max_shape)
assert len(sha... | https://github.com/activeloopai/Hub/issues/298 | β feature_testing python upload_animals.py
26180
{'labels': ClassLabel(shape=(), dtype='int64', names=['pecora', 'mucca', 'cane', 'ragno', 'cavallo', 'elefante', 'gallina', 'gatto', 'scoiattolo', 'farfalla'], num_classes=10), 'image': Image(shape=(120, 120, 3), dtype='uint8', max_shape=(120, 120, 4))}
ClassLabel(shape... | AssertionError |
def verify_cli_version():
os.environ["OUTDATED_IGNORE"] = 1
try:
version = pkg_resources.get_distribution(hub.__name__).version
is_outdated, latest_version = check_outdated(hub.__name__, version)
if is_outdated:
print(
"\033[93m"
+ "Hub is out ... | def verify_cli_version():
try:
version = pkg_resources.get_distribution(hub.__name__).version
is_outdated, latest_version = check_outdated(hub.__name__, version)
if is_outdated:
print(
"\033[93m"
+ "Hub is out of date. Please upgrade the package by... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def check_response_status(self, response):
"""
Check response status and throw corresponding exception on failure
"""
code = response.status_code
if code < 200 or code >= 300:
try:
message = response.json()["description"]
except Exception:
message = " "
... | def check_response_status(self, response):
"""
Check response status and throw corresponding exception on failure
"""
code = response.status_code
if code < 200 or code >= 300:
try:
message = response.json()["error"]
except Exception:
message = " "
log... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def init(
token: str = "",
cloud=False,
n_workers=1,
memory_limit=None,
processes=False,
threads_per_worker=1,
distributed=True,
):
"""Initializes cluster either local or on the cloud
Parameters
----------
token: str
token provided by snark
cache: float
A... | def init(
token: str = "",
cloud=False,
n_workers=1,
memory_limit=None,
processes=False,
threads_per_worker=1,
distributed=True,
):
"""Initializes cluster either local or on the cloud
Parameters
----------
token: str
token provided by snark
cache: float
A... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def generate(generator: DatasetGenerator, input) -> Dataset:
"""Generates dataset based on DatabaseGenerator class instance and iterable input
For every element in input runs generators __call__ function.
That function should return dict of numpy arrays containing single or multiple outputs for axis 0 of ge... | def generate(generator: DatasetGenerator, input) -> Dataset:
"""Generates dataset based on DatabaseGenerator class instance and iterable input
For every element in input runs generators __call__ function.
That function should return dict of numpy arrays containing single or multiple outputs for axis 0 of ge... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def concat(datasets: Iterable[Dataset]) -> Dataset:
"""Concats multiple datasets into one along axis 0
This is equivalent to concat every tensor with the same key
"""
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
... | def concat(datasets: Iterable[Dataset]) -> Dataset:
"""Concats multiple datasets into one along axis 0
This is equivalent to concat every tensor with the same key
"""
keys = [sorted(dataset._tensors.keys()) for dataset in datasets]
for key in keys:
assert key == keys[0]
keys = keys[0]
... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def __init__(self, tensors: Dict[str, Tensor], metainfo=dict()):
"""Creates dict given dict of tensors (name -> Tensor key value pairs)"""
self._tensors = tensors
self._metainfo = metainfo
shape = None
for name, tensor in tensors.items():
if shape is None or tensor.ndim > len(shape):
... | def __init__(self, tensors: Dict[str, Tensor], metainfo=dict()):
"""Creates dict given dict of tensors (name -> Tensor key value pairs)"""
self._tensors = tensors
self._metainfo = metainfo
shape = None
for name, tensor in tensors.items():
if shape is None or tensor.ndim > len(shape):
... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def to_pytorch(self, transform=None, max_text_len=30):
"""
Transforms into pytorch dataset
Parameters
----------
transform: func
any transform that takes input a dictionary of a sample and returns transformed dictionary
max_text_len: integer
the maximum length of text strings th... | def to_pytorch(self, transform=None, max_text_len=30):
"""
Transforms into pytorch dataset
Parameters
----------
transform: func
any transform that takes input a dictionary of a sample and returns transformed dictionary
max_text_len: integer
the maximum length of text strings th... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def to_tensorflow(self, max_text_len=30):
"""
Transforms into tensorflow dataset
Parameters
----------
max_text_len: integer
the maximum length of text strings that would be stored. Strings longer than this would be snipped
"""
try:
import tensorflow as tf
except Import... | def to_tensorflow(self, max_text_len=30):
"""
Transforms into tensorflow dataset
Parameters
----------
max_text_len: integer
the maximum length of text strings that would be stored. Strings longer than this would be snipped
"""
try:
import tensorflow as tf
except Import... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def tf_dtype(np_dtype):
try:
if "U" in np_dtype:
return tf.dtypes.as_dtype("string")
return tf.dtypes.as_dtype(np_dtype)
except Exception as e:
logger.log(e)
return tf.variant
| def tf_dtype(np_dtype):
try:
if "U" in np_dtype:
return tf.dtypes.as_dtype("string")
return tf.dtypes.as_dtype(np_dtype)
except Exception as e:
return tf.variant
| https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def load(tag, creds=None, session_creds=True) -> Dataset:
"""Load a dataset from repository using given url and credentials (optional)"""
fs, path = _load_fs_and_path(tag, creds, session_creds=session_creds)
fs: fsspec.AbstractFileSystem = fs
path_2 = f"{path}/meta.json"
if not fs.exists(path):
... | def load(tag, creds=None, session_creds=True) -> Dataset:
"""Load a dataset from repository using given url and credentials (optional)"""
fs, path = _load_fs_and_path(tag, creds, session_creds=session_creds)
fs: fsspec.AbstractFileSystem = fs
path_2 = f"{path}/meta.json"
if not fs.exists(path):
... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def from_array(array, dtag=None, dcompress=None, chunksize=None) -> Tensor:
"""Generates tensor from arraylike object
Parameters
----------
array : np.ndarray
Numpy array like object with shape, dtype, dims
dtag : str, optional
Describes type of the data stored in this array (image, ... | def from_array(array, dtag=None, dcompress=None, chunksize=None) -> Tensor:
"""Generates tensor from arraylike object
Parameters
----------
array : np.ndarray
Numpy array like object with shape, dtype, dims
dtag : str, optional
Describes type of the data stored in this array (image, ... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def __init__(self, meta: dict, daskarray, delayed_objs: tuple = None):
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
global dask
if not meta.get("preprocessed"):
meta = Tensor._preprocess_meta(meta, daska... | def __init__(self, meta: dict, daskarray, delayed_objs: tuple = None):
if not meta.get("preprocessed"):
meta = Tensor._preprocess_meta(meta, daskarray)
self._meta = meta
self._array = daskarray
self._delayed_objs = delayed_objs
self._shape = _dask_shape_backward(daskarray.shape)
self._dt... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def __init__(self, response):
message = f"No permision to store the dataset at {response}"
super(PermissionException, self).__init__(message=message)
| def __init__(self, response):
message = f"No permision to store the dataset at {response}"
super().__init__(message=message)
| https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def _flatten(list_):
"""
Helper function to flatten the list
"""
return [item for sublist in list_ for item in sublist]
| def _flatten(l):
"""
Helper function to flatten the list
"""
return [item for sublist in l for item in sublist]
| https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def get_argnames(func):
"""Introspecs the arguments of a callable.
Args:
func: The callable to introspect
Returns:
A list of argument names, excluding *arg and **kwargs
arguments.
"""
if six.PY2:
func_object = _get_func_if_nested(func)
spec = _get_argspec(f... | def get_argnames(func):
"""Introspecs the arguments of a callable.
Args:
func: The callable to introspect
Returns:
A list of argument names, excluding *arg and **kwargs
arguments.
"""
if six.PY2:
func_object = _get_func_if_nested(func)
        spec = _get_argspec(f... | https://github.com/falconry/falcon/issues/1254 | (falcon-bug-repro) falcon-bug-repro » python main.py
Traceback (most recent call last):
File "main.py", line 19, in <module>
MyMiddleware(),
File "/Users/joshklar/.virtualenvs/falcon-bug-repro/lib/python3.6/site-packages/falcon/api.py", line 156, in __init__
middleware, independent_middleware=independent_middleware)
... | IndexError |
def set_header(self, name, value):
"""Set a header for this response to a given value.
Warning:
Calling this method overwrites the existing value, if any.
Warning:
For setting cookies, see instead :meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restr... | def set_header(self, name, value):
"""Set a header for this response to a given value.
Warning:
Calling this method overwrites the existing value, if any.
Warning:
For setting cookies, see instead :meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restr... | https://github.com/falconry/falcon/issues/413 | Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string | TypeError |
def append_header(self, name, value):
"""Set or append a header for this response.
Warning:
If the header already exists, the new value will be appended
to it, delimited by a comma. Most header specifications support
this format, Set-Cookie being the notable exceptions.
Warning:
... | def append_header(self, name, value):
"""Set or append a header for this response.
Warning:
If the header already exists, the new value will be appended
to it, delimited by a comma. Most header specifications support
this format, Set-Cookie being the notable exceptions.
Warning:
... | https://github.com/falconry/falcon/issues/413 | Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string | TypeError |
def set_headers(self, headers):
"""Set several headers at once.
Warning:
Calling this method overwrites existing values, if any.
Args:
headers (dict or list): A dictionary of header names and values
to set, or a ``list`` of (*name*, *value*) tuples. Both *name*
and ... | def set_headers(self, headers):
"""Set several headers at once.
Warning:
Calling this method overwrites existing values, if any.
Args:
headers (dict or list): A dictionary of header names and values
to set, or a ``list`` of (*name*, *value*) tuples. Both *name*
and ... | https://github.com/falconry/falcon/issues/413 | Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string | TypeError |
def add_link(
self,
target,
rel,
title=None,
title_star=None,
anchor=None,
hreflang=None,
type_hint=None,
):
"""
Add a link header to the response.
See also: https://tools.ietf.org/html/rfc5988
Note:
Calling this method repeatedly will cause each link to be
... | def add_link(
self,
target,
rel,
title=None,
title_star=None,
anchor=None,
hreflang=None,
type_hint=None,
):
"""
Add a link header to the response.
See also: https://tools.ietf.org/html/rfc5988
Note:
Calling this method repeatedly will cause each link to be
... | https://github.com/falconry/falcon/issues/413 | Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string | TypeError |
def header_property(name, doc, transform=None):
"""Creates a header getter/setter.
Args:
name: Header name, e.g., "Content-Type"
doc: Docstring for the property
transform: Transformation function to use when setting the
property. The value will be passed to the function, and... | def header_property(name, doc, transform=None):
"""Creates a header getter/setter.
Args:
name: Header name, e.g., "Content-Type"
doc: Docstring for the property
transform: Transformation function to use when setting the
property. The value will be passed to the function, and... | https://github.com/falconry/falcon/issues/413 | Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string | TypeError |
def fset(self, value):
self._headers[normalized_name] = value
| def fset(self, value):
self._headers[normalized_name] = transform(value)
| https://github.com/falconry/falcon/issues/413 | Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string | TypeError |
def format_range(value):
"""Formats a range header tuple per the HTTP spec.
Args:
value: ``tuple`` passed to `req.range`
"""
# PERF(kgriffs): % was found to be faster than str.format(),
# string concatenation, and str.join() in this case.
if len(value) == 4:
result = "%s %s-%s... | def format_range(value):
"""Formats a range header tuple per the HTTP spec.
Args:
value: ``tuple`` passed to `req.range`
"""
# PERF(kgriffs): % was found to be faster than str.format(),
# string concatenation, and str.join() in this case.
if len(value) == 4:
return "%s %s-%s/%... | https://github.com/falconry/falcon/issues/413 | Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string | TypeError |
def set_header(self, name, value):
"""Set a header for this response to a given value.
Warning:
Calling this method overwrites the existing value, if any.
Warning:
For setting cookies, see instead :meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restr... | def set_header(self, name, value):
"""Set a header for this response to a given value.
Warning:
Calling this method overwrites the existing value, if any.
Warning:
For setting cookies, see instead :meth:`~.set_cookie`
Args:
name (str): Header name to set (case-insensitive). Mu... | https://github.com/falconry/falcon/issues/413 | Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string | TypeError |
def append_header(self, name, value):
"""Set or append a header for this response.
Warning:
If the header already exists, the new value will be appended
to it, delimited by a comma. Most header specifications support
this format, Cookie and Set-Cookie being the notable exceptions.
... | def append_header(self, name, value):
"""Set or append a header for this response.
Warning:
If the header already exists, the new value will be appended
to it, delimited by a comma. Most header specifications support
this format, Cookie and Set-Cookie being the notable exceptions.
... | https://github.com/falconry/falcon/issues/413 | Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string | TypeError |
def set_headers(self, headers):
"""Set several headers at once.
Warning:
Calling this method overwrites existing values, if any.
Args:
headers (dict or list): A dictionary of header names and values
to set, or a ``list`` of (*name*, *value*) tuples. Both *name*
and ... | def set_headers(self, headers):
"""Set several headers at once.
Warning:
Calling this method overwrites existing values, if any.
Args:
headers (dict or list): A dictionary of header names and values
to set, or ``list`` of (*name*, *value*) tuples. Both *name*
and *v... | https://github.com/falconry/falcon/issues/413 | Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string | TypeError |
def __call__(self, env, start_response):
"""WSGI `app` method.
Makes instances of API callable from a WSGI server. May be used to
host an API or called directly in order to simulate requests when
testing the API.
See also PEP 3333.
Args:
env (dict): A WSGI environment dictionary
... | def __call__(self, env, start_response):
"""WSGI `app` method.
Makes instances of API callable from a WSGI server. May be used to
host an API or called directly in order to simulate requests when
testing the API.
See also PEP 3333.
Args:
env (dict): A WSGI environment dictionary
... | https://github.com/falconry/falcon/issues/689 | Traceback (most recent call last):
<...snip...>
File "/usr/local/lib/python2.7/dist-packages/falcon/api.py", line 247, in __call__
start_response(resp.status, headers)
File "/usr/lib/python2.7/wsgiref/validate.py", line 167, in start_response_wrapper
check_content_type(status, headers)
File "/usr/lib/python2.7/wsgiref/... | AssertionError |
def default_serialize_error(req, resp, exception):
"""Serialize the given instance of HTTPError.
This function determines which of the supported media types, if
any, are acceptable by the client, and serializes the error
to the preferred type.
Currently, JSON and XML are the only supported media t... | def default_serialize_error(req, resp, exception):
"""Serialize the given instance of HTTPError.
This function determines which of the supported media types, if
any, are acceptable by the client, and serializes the error
to the preferred type.
Currently, JSON and XML are the only supported media t... | https://github.com/falconry/falcon/issues/689 | Traceback (most recent call last):
<...snip...>
File "/usr/local/lib/python2.7/dist-packages/falcon/api.py", line 247, in __call__
start_response(resp.status, headers)
File "/usr/lib/python2.7/wsgiref/validate.py", line 167, in start_response_wrapper
check_content_type(status, headers)
File "/usr/lib/python2.7/wsgiref/... | AssertionError |
def _wsgi_headers(self, media_type=None, py2=PY2):
"""Convert headers into the format expected by WSGI servers.
Args:
media_type: Default media type to use for the Content-Type
header if the header was not set explicitly (default ``None``).
"""
headers = self._headers
# PERF(... | def _wsgi_headers(self, media_type=None, py2=PY2):
"""Convert headers into the format expected by WSGI servers.
Args:
media_type: Default media type to use for the Content-Type
header if the header was not set explicitly (default ``None``).
"""
headers = self._headers
# PERF(... | https://github.com/falconry/falcon/issues/689 | Traceback (most recent call last):
<...snip...>
File "/usr/local/lib/python2.7/dist-packages/falcon/api.py", line 247, in __call__
start_response(resp.status, headers)
File "/usr/lib/python2.7/wsgiref/validate.py", line 167, in start_response_wrapper
check_content_type(status, headers)
File "/usr/lib/python2.7/wsgiref/... | AssertionError |
def decode(encoded_uri):
"""Decodes percent-encoded characters in a URI or query string.
This function models the behavior of `urllib.parse.unquote_plus`,
albeit in a faster, more straightforward manner.
Args:
encoded_uri (str): An encoded URI (full or partial).
Returns:
str: A de... | def decode(encoded_uri):
"""Decodes percent-encoded characters in a URI or query string.
This function models the behavior of `urllib.parse.unquote_plus`,
albeit in a faster, more straightforward manner.
Args:
encoded_uri (str): An encoded URI (full or partial).
Returns:
str: A de... | https://github.com/falconry/falcon/issues/588 | Traceback (most recent call last):
File "/opt/ads/venv/local/lib/python2.7/site-packages/falcon/api.py", line 154, in __call__
req = self._request_type(env, options=self.req_options)
File "/opt/ads/venv/local/lib/python2.7/site-packages/falcon/request.py", line 237, in __init__
keep_blank_qs_values=self.options.keep_bl... | KeyError |
def __init__(self, env, options=None):
global _maybe_wrap_wsgi_stream
self.env = env
self.options = options if options else RequestOptions()
self._wsgierrors = env["wsgi.errors"]
self.stream = env["wsgi.input"]
self.method = env["REQUEST_METHOD"]
# Normalize path
path = env["PATH_INFO... | def __init__(self, env, options=None):
global _maybe_wrap_wsgi_stream
self.env = env
self.options = options if options else RequestOptions()
if self.context_type is None:
# Literal syntax is more efficient than using dict()
self.context = {}
else:
# pylint will detect this ... | https://github.com/falconry/falcon/issues/567 | Traceback (most recent call last):
File "falcon/api.py", line 154, in falcon.api.API.__call__ (falcon/api.c:1809)
File "falcon/request.py", line 214, in falcon.request.Request.__init__ (falcon/request.c:1629)
File "./dkuim/api.py", line 29, in context
print("Create CONTEXT", req.url)
File "falcon/request.py", line 399,... | AttributeError |
def _add_noise_shaping(dithered_waveform: Tensor, waveform: Tensor) -> Tensor:
r"""Noise shaping is calculated by error:
error[n] = dithered[n] - original[n]
noise_shaped_waveform[n] = dithered[n] + error[n-1]
"""
wf_shape = waveform.size()
waveform = waveform.reshape(-1, wf_shape[-1])
dith... | def _add_noise_shaping(dithered_waveform: Tensor, waveform: Tensor) -> Tensor:
r"""Noise shaping is calculated by error:
error[n] = dithered[n] - original[n]
noise_shaped_waveform[n] = dithered[n] + error[n-1]
"""
wf_shape = waveform.size()
waveform = waveform.reshape(-1, wf_shape[-1])
dith... | https://github.com/pytorch/audio/issues/862 | In [5]: F.dither(torch.randn(2, 441000).cuda())
Out[5]:
tensor([[ 0.3492, -1.8493, -0.4794, ..., 0.3907, -0.4662, -0.2815],
[-1.3821, 0.0401, 0.0023, ..., -1.1417, 0.6411, -0.6851]],
device='cuda:0')
In [6]: F.dither(torch.randn(2, 441000).cuda(), noise_shaping=True)
---------------------------------------------... | RuntimeError |
def _get_strided(
waveform: Tensor, window_size: int, window_shift: int, snip_edges: bool
) -> Tensor:
r"""Given a waveform (1D tensor of size ``num_samples``), it returns a 2D tensor (m, ``window_size``)
representing how the window is shifted along the waveform. Each row is a frame.
Args:
wave... | def _get_strided(
waveform: Tensor, window_size: int, window_shift: int, snip_edges: bool
) -> Tensor:
r"""Given a waveform (1D tensor of size ``num_samples``), it returns a 2D tensor (m, ``window_size``)
representing how the window is shifted along the waveform. Each row is a frame.
Args:
wave... | https://github.com/pytorch/audio/issues/613 | Traceback (most recent call last):
File "./fbank_test.py", line 27, in <module>
main()
File "./fbank_test.py", line 23, in main
fbank = torchaudio.compliance.kaldi.fbank(waveform)
File "/xxxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 554, in fbank
snip_edges, raw_energy, energy_floor, dit... | RuntimeError |
def _feature_window_function(
window_type: str,
window_size: int,
blackman_coeff: float,
device: torch.device,
dtype: int,
) -> Tensor:
r"""Returns a window function with the given type and size"""
if window_type == HANNING:
return torch.hann_window(
window_size, periodic... | def _feature_window_function(
window_type: str, window_size: int, blackman_coeff: float
) -> Tensor:
r"""Returns a window function with the given type and size"""
if window_type == HANNING:
return torch.hann_window(window_size, periodic=False)
elif window_type == HAMMING:
return torch.ha... | https://github.com/pytorch/audio/issues/613 | Traceback (most recent call last):
File "./fbank_test.py", line 27, in <module>
main()
File "./fbank_test.py", line 23, in main
fbank = torchaudio.compliance.kaldi.fbank(waveform)
File "/xxxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 554, in fbank
snip_edges, raw_energy, energy_floor, dit... | RuntimeError |
def _get_log_energy(
strided_input: Tensor, epsilon: Tensor, energy_floor: float
) -> Tensor:
r"""Returns the log energy of size (m) for a strided_input (m,*)"""
device, dtype = strided_input.device, strided_input.dtype
log_energy = torch.max(strided_input.pow(2).sum(1), epsilon).log() # size (m)
i... | def _get_log_energy(
strided_input: Tensor, epsilon: Tensor, energy_floor: float
) -> Tensor:
r"""Returns the log energy of size (m) for a strided_input (m,*)"""
log_energy = torch.max(strided_input.pow(2).sum(1), epsilon).log() # size (m)
if energy_floor == 0.0:
return log_energy
else:
... | https://github.com/pytorch/audio/issues/613 | Traceback (most recent call last):
File "./fbank_test.py", line 27, in <module>
main()
File "./fbank_test.py", line 23, in main
fbank = torchaudio.compliance.kaldi.fbank(waveform)
File "/xxxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 554, in fbank
snip_edges, raw_energy, energy_floor, dit... | RuntimeError |
def _get_window(
waveform: Tensor,
padded_window_size: int,
window_size: int,
window_shift: int,
window_type: str,
blackman_coeff: float,
snip_edges: bool,
raw_energy: bool,
energy_floor: float,
dither: float,
remove_dc_offset: bool,
preemphasis_coefficient: float,
) -> T... | def _get_window(
waveform: Tensor,
padded_window_size: int,
window_size: int,
window_shift: int,
window_type: str,
blackman_coeff: float,
snip_edges: bool,
raw_energy: bool,
energy_floor: float,
dither: float,
remove_dc_offset: bool,
preemphasis_coefficient: float,
) -> T... | https://github.com/pytorch/audio/issues/613 | Traceback (most recent call last):
File "./fbank_test.py", line 27, in <module>
main()
File "./fbank_test.py", line 23, in main
fbank = torchaudio.compliance.kaldi.fbank(waveform)
File "/xxxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 554, in fbank
snip_edges, raw_energy, energy_floor, dit... | RuntimeError |
def fbank(
waveform: Tensor,
blackman_coeff: float = 0.42,
channel: int = -1,
dither: float = 0.0,
energy_floor: float = 1.0,
frame_length: float = 25.0,
frame_shift: float = 10.0,
high_freq: float = 0.0,
htk_compat: bool = False,
low_freq: float = 20.0,
min_duration: float =... | def fbank(
waveform: Tensor,
blackman_coeff: float = 0.42,
channel: int = -1,
dither: float = 0.0,
energy_floor: float = 1.0,
frame_length: float = 25.0,
frame_shift: float = 10.0,
high_freq: float = 0.0,
htk_compat: bool = False,
low_freq: float = 20.0,
min_duration: float =... | https://github.com/pytorch/audio/issues/613 | Traceback (most recent call last):
File "./fbank_test.py", line 27, in <module>
main()
File "./fbank_test.py", line 23, in main
fbank = torchaudio.compliance.kaldi.fbank(waveform)
File "/xxxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 554, in fbank
snip_edges, raw_energy, energy_floor, dit... | RuntimeError |
def _get_LR_indices_and_weights(
orig_freq: float,
new_freq: float,
output_samples_in_unit: int,
window_width: float,
lowpass_cutoff: float,
lowpass_filter_width: int,
device: torch.device,
dtype: int,
) -> Tuple[Tensor, Tensor]:
r"""Based on LinearResample::SetIndexesAndWeights wher... | def _get_LR_indices_and_weights(
orig_freq: float,
new_freq: float,
output_samples_in_unit: int,
window_width: float,
lowpass_cutoff: float,
lowpass_filter_width: int,
) -> Tuple[Tensor, Tensor]:
r"""Based on LinearResample::SetIndexesAndWeights where it retrieves the weights for
resampl... | https://github.com/pytorch/audio/issues/613 | Traceback (most recent call last):
File "./fbank_test.py", line 27, in <module>
main()
File "./fbank_test.py", line 23, in main
fbank = torchaudio.compliance.kaldi.fbank(waveform)
File "/xxxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 554, in fbank
snip_edges, raw_energy, energy_floor, dit... | RuntimeError |
def resample_waveform(
waveform: Tensor, orig_freq: float, new_freq: float, lowpass_filter_width: int = 6
) -> Tensor:
r"""Resamples the waveform at the new frequency. This matches Kaldi's OfflineFeatureTpl ResampleWaveform
which uses a LinearResample (resample a signal at linearly spaced intervals to upsam... | def resample_waveform(
waveform: Tensor, orig_freq: float, new_freq: float, lowpass_filter_width: int = 6
) -> Tensor:
r"""Resamples the waveform at the new frequency. This matches Kaldi's OfflineFeatureTpl ResampleWaveform
which uses a LinearResample (resample a signal at linearly spaced intervals to upsam... | https://github.com/pytorch/audio/issues/613 | Traceback (most recent call last):
File "./fbank_test.py", line 27, in <module>
main()
File "./fbank_test.py", line 23, in main
fbank = torchaudio.compliance.kaldi.fbank(waveform)
File "/xxxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 554, in fbank
snip_edges, raw_energy, energy_floor, dit... | RuntimeError |
async def _run_app(
app: Union[Application, Awaitable[Application]],
*,
host: Optional[Union[str, HostSequence]] = None,
port: Optional[int] = None,
path: Optional[str] = None,
sock: Optional[socket.socket] = None,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,... | async def _run_app(
app: Union[Application, Awaitable[Application]],
*,
host: Optional[Union[str, HostSequence]] = None,
port: Optional[int] = None,
path: Optional[str] = None,
sock: Optional[socket.socket] = None,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,... | https://github.com/aio-libs/aiohttp/issues/5127 | File "C:\_dev\.venv\lib\site-packages\aiohttp\web.py", line 379, in _run_app
if sys.platform == "win32" and sys.version_info < 3.8:
TypeError: '<' not supported between instances of 'sys.version_info' and 'float' | TypeError |
async def sendfile(self) -> None:
assert self.transport is not None
loop = self.loop
data = b"".join(self._sendfile_buffer)
if hasattr(loop, "sendfile"):
# Python 3.7+
self.transport.write(data)
if self._count != 0:
await loop.sendfile(self.transport, self._fobj, self... | async def sendfile(self) -> None:
assert self.transport is not None
loop = self.loop
data = b"".join(self._sendfile_buffer)
if hasattr(loop, "sendfile"):
# Python 3.7+
self.transport.write(data)
await loop.sendfile(self.transport, self._fobj, self._offset, self._count)
aw... | https://github.com/aio-libs/aiohttp/issues/5124 | Traceback (most recent call last):
File "/__w/core/core/venv/lib/python3.7/site-packages/aiohttp/web_protocol.py", line 461, in start
resp, reset = await task
File "/__w/core/core/venv/lib/python3.7/site-packages/aiohttp/web_protocol.py", line 414, in _handle_request
reset = await self.finish_response(request, resp, st... | ValueError |
async def resolve(
self, hostname: str, port: int = 0, family: int = socket.AF_INET
) -> List[Dict[str, Any]]:
infos = await self._loop.getaddrinfo(
hostname, port, type=socket.SOCK_STREAM, family=family
)
hosts = []
for family, _, proto, _, address in infos:
if family == socket.AF_... | async def resolve(
self, host: str, port: int = 0, family: int = socket.AF_INET
) -> List[Dict[str, Any]]:
infos = await self._loop.getaddrinfo(
host, port, type=socket.SOCK_STREAM, family=family
)
hosts = []
for family, _, proto, _, address in infos:
if family == socket.AF_INET6 an... | https://github.com/aio-libs/aiohttp/issues/5110 | Traceback (most recent call last):
File "/home/lilydjwg/tmpfs/venv/lib/python3.8/site-packages/aiohttp/connector.py", line 946, in _wrap_create_connection
return await self._loop.create_connection(*args, **kwargs) # type: ignore # noqa
File "/usr/lib/python3.8/asyncio/base_events.py", line 1050, in create_connection
... | ssl.SSLCertVerificationError |
def is_connected(self) -> bool:
return self.transport is not None and not self.transport.is_closing()
| def is_connected(self) -> bool:
return self.transport is not None
| https://github.com/aio-libs/aiohttp/issues/4587 | Traceback (most recent call last):
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client.py", line 502, in _request
resp = await req.send(conn)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client_reqrep.py", line 629, in send
await writer.write_headers(s... | ConnectionResetError |
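Note: the after_merge version stops treating a pooled connection as reusable once its transport has started closing. A minimal sketch of the same guard on a plain asyncio transport (illustration only, not aiohttp's internals):

    import asyncio
    from typing import Optional

    def can_reuse(transport: Optional[asyncio.Transport]) -> bool:
        # A transport that exists but is already closing must not be handed
        # back out of a connection pool, or the next write can fail with
        # ConnectionResetError.
        return transport is not None and not transport.is_closing()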
def _cleanup(self) -> None:
"""Cleanup unused transports."""
if self._cleanup_handle:
self._cleanup_handle.cancel()
now = self._loop.time()
timeout = self._keepalive_timeout
if self._conns:
connections = {}
deadline = now - timeout
for key, conns in self._conns.item... | def _cleanup(self) -> None:
"""Cleanup unused transports."""
if self._cleanup_handle:
self._cleanup_handle.cancel()
now = self._loop.time()
timeout = self._keepalive_timeout
if self._conns:
connections = {}
deadline = now - timeout
for key, conns in self._conns.item... | https://github.com/aio-libs/aiohttp/issues/4587 | Traceback (most recent call last):
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client.py", line 502, in _request
resp = await req.send(conn)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client_reqrep.py", line 629, in send
await writer.write_headers(s... | ConnectionResetError |
def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:
try:
conns = self._conns[key]
except KeyError:
return None
t1 = self._loop.time()
while conns:
proto, t0 = conns.pop()
if proto.is_connected():
if t1 - t0 > self._keepalive_timeout:
... | def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:
try:
conns = self._conns[key]
except KeyError:
return None
t1 = self._loop.time()
while conns:
proto, t0 = conns.pop()
if proto.is_connected():
if t1 - t0 > self._keepalive_timeout:
... | https://github.com/aio-libs/aiohttp/issues/4587 | Traceback (most recent call last):
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client.py", line 502, in _request
resp = await req.send(conn)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client_reqrep.py", line 629, in send
await writer.write_headers(s... | ConnectionResetError |
def feed_data(
self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
) -> Tuple[bool, bytes]:
# Read specified amount of bytes
if self._type == ParseState.PARSE_LENGTH:
required = self._length
chunk_len = len(chunk)
if required >= chunk_len:
self._length = re... | def feed_data(
self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
) -> Tuple[bool, bytes]:
# Read specified amount of bytes
if self._type == ParseState.PARSE_LENGTH:
required = self._length
chunk_len = len(chunk)
if required >= chunk_len:
self._length = re... | https://github.com/aio-libs/aiohttp/issues/4630 | (False, b'')
(False, b'')
Traceback (most recent call last):
File "aiohttp-test.py", line 32, in <module>
print(repr(parser.feed_eof()))
File ".../lib/python3.6/site-packages/aiohttp/http_parser.py", line 575, in feed_eof
"Not enough data for satisfy transfer length header.")
aiohttp.http_exceptions.TransferEncodingErr... | aiohttp.http_exceptions.TransferEncodingError |
async def text(self) -> str:
"""Return BODY as text using encoding from .charset."""
bytes_body = await self.read()
encoding = self.charset or "utf-8"
try:
return bytes_body.decode(encoding)
except LookupError:
raise HTTPUnsupportedMediaType()
| async def text(self) -> str:
"""Return BODY as text using encoding from .charset."""
bytes_body = await self.read()
encoding = self.charset or "utf-8"
return bytes_body.decode(encoding)
| https://github.com/aio-libs/aiohttp/issues/3562 | Error handling request
Traceback (most recent call last):
File "/home/pentusha/.venvs/test/lib/python3.7/site-packages/aiohttp/web_protocol.py", line 418, in start
resp = await task
File "/home/pentusha/.venvs/test/lib/python3.7/site-packages/aiohttp/web_app.py", line 458, in _handle
resp = await handler(request)
File ... | LookupError |
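Note: bytes.decode raises LookupError when the request advertises a charset Python does not know, and the after_merge version maps that to an HTTP 415 instead of an unhandled 500. A minimal standalone sketch of the failure mode:

    body = b"hello"
    try:
        text = body.decode("utf-42")   # bogus charset taken from a client header
    except LookupError:
        # In a web handler this is the place to answer 415 Unsupported Media Type
        # rather than letting the server log an unhandled error.
        text = None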
async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
"""Return POST parameters."""
if self._post is not None:
return self._post
if self._method not in self.POST_METHODS:
self._post = MultiDictProxy(MultiDict())
return self._post
content_type = self.content_typ... | async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
"""Return POST parameters."""
if self._post is not None:
return self._post
if self._method not in self.POST_METHODS:
self._post = MultiDictProxy(MultiDict())
return self._post
content_type = self.content_typ... | https://github.com/aio-libs/aiohttp/issues/3562 | Error handling request
Traceback (most recent call last):
File "/home/pentusha/.venvs/test/lib/python3.7/site-packages/aiohttp/web_protocol.py", line 418, in start
resp = await task
File "/home/pentusha/.venvs/test/lib/python3.7/site-packages/aiohttp/web_app.py", line 458, in _handle
resp = await handler(request)
File ... | LookupError |
async def start(self) -> None:
"""Process incoming request.
It reads request line, request headers and request payload, then
calls handle_request() method. Subclass has to override
handle_request(). start() handles various exceptions in request
or response handling. Connection is being closed alway... | async def start(self) -> None:
"""Process incoming request.
It reads request line, request headers and request payload, then
calls handle_request() method. Subclass has to override
handle_request(). start() handles various exceptions in request
or response handling. Connection is being closed alway... | https://github.com/aio-libs/aiohttp/issues/3648 | Unhandled exception
Traceback (most recent call last):
File "/home/decaz/.virtualenvs/test-aiohttp/lib/python3.6/site-packages/aiohttp/web_protocol.py", line 448, in start
await resp.write_eof()
File "/usr/lib/python3.6/asyncio/coroutines.py", line 110, in __next__
return self.gen.send(None)
File "/home/decaz/.virtuale... | ConnectionResetError |
async def start(self) -> None:
"""Process incoming request.
It reads request line, request headers and request payload, then
calls handle_request() method. Subclass has to override
handle_request(). start() handles various exceptions in request
or response handling. Connection is being closed alway... | async def start(self) -> None:
"""Process incoming request.
It reads request line, request headers and request payload, then
calls handle_request() method. Subclass has to override
handle_request(). start() handles various exceptions in request
or response handling. Connection is being closed alway... | https://github.com/aio-libs/aiohttp/issues/3648 | Unhandled exception
Traceback (most recent call last):
File "/home/decaz/.virtualenvs/test-aiohttp/lib/python3.6/site-packages/aiohttp/web_protocol.py", line 448, in start
await resp.write_eof()
File "/usr/lib/python3.6/asyncio/coroutines.py", line 110, in __next__
return self.gen.send(None)
File "/home/decaz/.virtuale... | ConnectionResetError |
def data_received(self, data):
if self._force_close or self._close:
return
# parse http messages
if self._payload_parser is None and not self._upgrade:
try:
messages, upgraded, tail = self._request_parser.feed_data(data)
except HttpProcessingError as exc:
# s... | def data_received(self, data):
if self._force_close or self._close:
return
# parse http messages
if self._payload_parser is None and not self._upgrade:
try:
messages, upgraded, tail = self._request_parser.feed_data(data)
except HttpProcessingError as exc:
# s... | https://github.com/aio-libs/aiohttp/issues/2752 | Task exception was never retrieved
future: <Task finished coro=<RequestHandler.start() done, defined at C:\Program Files\Python36\lib\site-packages\aiohttp\web_protocol.py:340> exception=IndexError('pop from an empty deque',)>
Traceback (most recent call last):
File "C:\Program Files\Python36\lib\site-packages\aiohttp\... | IndexError |
def make_handler(self, app):
if hasattr(self.cfg, "debug"):
is_debug = self.cfg.debug
else:
is_debug = self.log.loglevel == logging.DEBUG
return app.make_handler(
logger=self.log,
debug=is_debug,
slow_request_timeout=self.cfg.timeout,
keepalive_timeout=self.c... | def make_handler(self, app):
return app.make_handler(
logger=self.log,
debug=self.cfg.debug,
timeout=self.cfg.timeout,
keep_alive=self.cfg.keepalive,
access_log=self.log.access_log,
access_log_format=self._get_valid_log_format(self.cfg.access_log_format),
)
| https://github.com/aio-libs/aiohttp/issues/1148 | Traceback (most recent call last):
File "/Users/bcmyers/Dropbox/Programming/python/website/venv/lib/python3.5/site-packages/gunicorn/arbiter.py", line 557, in spawn_worker
worker.init_process()
File "/Users/bcmyers/Dropbox/Programming/python/website/venv/lib/python3.5/site-packages/aiohttp/worker.py", line 196, in init... | AttributeError |
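Note: the AttributeError appears because newer gunicorn configs no longer expose cfg.debug, so the after_merge worker probes for the attribute before using it. A minimal sketch of the defensive pattern (the Cfg class is a hypothetical stand-in):

    import logging

    class Cfg:              # stand-in for a gunicorn config object
        timeout = 30        # no `debug` attribute on purpose

    cfg = Cfg()
    # Fall back to the logger level when the config has no `debug` attribute.
    if hasattr(cfg, "debug"):
        is_debug = cfg.debug
    else:
        is_debug = logging.getLogger().level == logging.DEBUG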
def add_route(self, method, path, handler, *, name=None):
assert path.startswith("/")
assert callable(handler), handler
if not asyncio.iscoroutinefunction(handler):
handler = asyncio.coroutine(handler)
method = method.upper()
assert method in self.METHODS, method
parts = []
factory =... | def add_route(self, method, path, handler, *, name=None):
assert path.startswith("/")
assert callable(handler), handler
if not asyncio.iscoroutinefunction(handler):
handler = asyncio.coroutine(handler)
method = method.upper()
assert method in self.METHODS, method
parts = []
factory =... | https://github.com/aio-libs/aiohttp/issues/264 | from aiohttp.web import UrlDispatcher
disp = UrlDispatcher()
disp.add_route('GET', '/{num:^\d+}', lambda x: x, name='name')
<DynamicRoute 'name' [GET] /{num:^\d+} -> <function <lambda> at 0x7f0223131840>
disp['name'].url(parts={'num': '1'})
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/h... | ValueError |
def add_fields(self, *fields):
to_add = list(fields)
while to_add:
rec = to_add.pop(0)
if isinstance(rec, io.IOBase):
k = guess_filename(rec, "unknown")
self.add_field(k, rec)
self._has_io = True
elif isinstance(rec, multidict.MultiDict):
... | def add_fields(self, *fields):
for rec in fields:
if isinstance(rec, io.IOBase):
k = guess_filename(rec, "unknown")
self.add_field(k, rec)
self._has_io = True
elif len(rec) == 1:
k = guess_filename(rec[0], "unknown")
self.add_field(k, rec[... | https://github.com/aio-libs/aiohttp/issues/114 | Traceback (most recent call last):
File "bug.py", line 7, in <module>
loop.run_until_complete(aiohttp.request('POST', "http://httpbin.org/post", data=d))
File "/usr/lib64/python3.4/asyncio/base_events.py", line 208, in run_until_complete
return future.result()
File "/usr/lib64/python3.4/asyncio/futures.py", line 243, i... | AssertionError |
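Note: the before_merge add_fields walked the arguments once and asserted on anything it did not recognize, while the fix drains a work queue so a nested container of fields is unpacked instead of tripping the assertion. A minimal sketch of that flattening pattern (illustration only; the container/field test here is simplified):

    def flatten_fields(*fields):
        # Lists are treated as containers of fields and expanded in place;
        # anything else is taken as a single field such as a (name, value) pair.
        out = []
        to_add = list(fields)
        while to_add:
            rec = to_add.pop(0)
            if isinstance(rec, list):
                to_add.extend(rec)
            else:
                out.append(rec)
        return out

    print(flatten_fields(("k", "v"), [("a", "1"), ("b", "2")]))
    # [('k', 'v'), ('a', '1'), ('b', '2')]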
def map_network(self, pool_size=255):
"""
Maps the network
:param pool_size: amount of parallel ping processes
:return: list of valid ip addresses
"""
ip_list = list()
# get my IP and compose a base like 192.168.1.xxx
ip_parts = self.get_my_ip().split(".")
base_ip = ip_parts[0] + ".... | def map_network(self, pool_size=255):
"""
Maps the network
:param pool_size: amount of parallel ping processes
:return: list of valid ip addresses
"""
if not self.ping:
print(
"Error: `ping` executable not found. Please enter the IP "
"address in the text box manu... | https://github.com/srevinsaju/guiscrcpy/issues/164 | Process Process-232:
Traceback (most recent call last):
File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/process.py", line 297, in _bootstrap
File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/process.py"... | OSError |
def devices_detailed(increment=""):
if increment is None:
raise FileNotFoundError(
"guiscrcpy couldn't find adb. "
"Please specify path to adb in configuration filename"
)
proc = Popen(_(increment + " devices -l"), stdout=PIPE)
output = [[y.strip() for y in x.split()]... | def devices_detailed(increment=""):
if increment is None:
raise FileNotFoundError(
"guiscrcpy couldn't find adb. "
"Please specify path to adb in configuration filename"
)
proc = Popen(_(increment + " devices -l"), stdout=PIPE)
output = [[y.strip() for y in x.split()]... | https://github.com/srevinsaju/guiscrcpy/issues/117 | me@host:~$ Downloads/guiscrcpy-x86_64.AppImage
guiscrcpy
by srevinsaju
3.7.post195.dev
Licensed under GNU GPL v3 (c) 2020
MSG: Please ensure you have enabled USB Debugging on your device. See README.md for more details
(python3.8:31901): dbind-WARNING **: 10:21:56.701: Couldn't register with accessibility bus: Di... | IndexError |
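Note: the IndexError comes from indexing into the whitespace-split lines of `adb devices -l` output when a line has fewer columns than expected (for example an unauthorized or offline device). A minimal defensive parsing sketch (illustration only, not guiscrcpy's exact fix; the sample output is made up):

    raw = """List of devices attached
    0123456789ABCDEF       device usb:1-1 product:foo model:Bar device:baz
    FEDCBA9876543210       unauthorized
    """

    devices = []
    for line in raw.splitlines()[1:]:
        parts = line.split()
        if len(parts) < 2:          # skip blank or malformed rows instead of indexing blindly
            continue
        serial, state = parts[0], parts[1]
        model = next((p.split(":", 1)[1] for p in parts if p.startswith("model:")), "unknown")
        devices.append((serial, state, model))

    print(devices)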
def devices_detailed(increment=""):
if increment is None:
raise FileNotFoundError(
"guiscrcpy couldn't find adb. "
"Please specify path to adb in configuration filename"
)
proc = Popen(_(increment + " devices -l"), stdout=PIPE)
output = [[y.strip() for y in x.split()]... | def devices_detailed(increment=""):
if increment is None:
raise FileNotFoundError(
"guiscrcpy couldn't find adb. "
"Please specify path to adb in configuration filename"
)
proc = Popen(_(increment + " devices -l"), stdout=PIPE)
output = [[y.strip() for y in x.split()]... | https://github.com/srevinsaju/guiscrcpy/issues/117 | me@host:~$ Downloads/guiscrcpy-x86_64.AppImage
guiscrcpy
by srevinsaju
3.7.post195.dev
Licensed under GNU GPL v3 (c) 2020
MSG: Please ensure you have enabled USB Debugging on your device. See README.md for more details
(python3.8:31901): dbind-WARNING **: 10:21:56.701: Couldn't register with accessibility bus: Di... | IndexError |
def scan_devices_update_list_view(self):
"""
Scan for new devices; and update the list view
:return:
"""
# self.devices_view.clear()
paired_devices = []
for index in range(self.devices_view.count()):
paired_devices.append(self.devices_view.item(index))
devices = adb.devices_deta... | def scan_devices_update_list_view(self):
"""
Scan for new devices; and update the list view
:return:
"""
# self.devices_view.clear()
paired_devices = []
for index in range(self.devices_view.count()):
paired_devices.append(self.devices_view.item(index))
devices = adb.devices_deta... | https://github.com/srevinsaju/guiscrcpy/issues/117 | me@host:~$ Downloads/guiscrcpy-x86_64.AppImage
guiscrcpy
by srevinsaju
3.7.post195.dev
Licensed under GNU GPL v3 (c) 2020
MSG: Please ensure you have enabled USB Debugging on your device. See README.md for more details
(python3.8:31901): dbind-WARNING **: 10:21:56.701: Couldn't register with accessibility bus: Di... | IndexError |
def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the
request, this avoids reading the content at once into memory for
large responses. The chunk size is the number of bytes it should
read into memory. This is not necessarily th... | def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the
request, this avoids reading the content at once into memory for
large responses. The chunk size is the number of bytes it should
read into memory. This is not necessarily th... | https://github.com/maurosoria/dirsearch/issues/106 | Traceback (most recent call last):
File "/root/RECON/dirsearch-master/thirdparty/requests/packages/urllib3/response.py", line 192, in _decode
data = self._decoder.decompress(data)
File "/root/RECON/dirsearch-master/thirdparty/requests/packages/urllib3/response.py", line 58, in decompress
return self._obj.decompress(dat... | thirdparty.requests.packages.urllib3.exceptions.DecodeError |
def generate():
# Special case for urllib3.
if hasattr(self.raw, "stream"):
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
except ProtocolError as e:
raise ChunkedEncodingError(e)
except DecodeError as e:
... | def generate():
# Special case for urllib3.
if hasattr(self.raw, "stream"):
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
except ProtocolError as e:
raise ChunkedEncodingError(e)
except DecodeError as e:
... | https://github.com/maurosoria/dirsearch/issues/106 | Traceback (most recent call last):
File "/root/RECON/dirsearch-master/thirdparty/requests/packages/urllib3/response.py", line 192, in _decode
data = self._decoder.decompress(data)
File "/root/RECON/dirsearch-master/thirdparty/requests/packages/urllib3/response.py", line 58, in decompress
return self._obj.decompress(dat... | thirdparty.requests.packages.urllib3.exceptions.DecodeError |
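Note: the urllib3 DecodeError surfaces when a server claims gzip/deflate content-encoding but sends something else; requests re-raises it as ContentDecodingError, which callers of iter_content can catch. A minimal sketch of handling it on the caller side, assuming the public requests API (the URL is a placeholder):

    import requests
    from requests.exceptions import ChunkedEncodingError, ContentDecodingError

    try:
        resp = requests.get("https://example.com", stream=True, timeout=10)
        body = b""
        for chunk in resp.iter_content(chunk_size=8192):
            body += chunk
    except (ContentDecodingError, ChunkedEncodingError) as exc:
        # The body does not match the Content-Encoding / Transfer-Encoding headers;
        # treat the response as unusable instead of crashing the scan loop.
        body = None
        print("bad response encoding:", exc)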
def _evaluate_cityscapes(self, results, logger, imgfile_prefix):
"""Evaluation in Cityscapes protocol.
Args:
results (list): Testing results of the dataset.
logger (logging.Logger | str | None): Logger used for printing
related information during evaluation. Default: None.
i... | def _evaluate_cityscapes(self, results, logger, imgfile_prefix):
"""Evaluation in Cityscapes protocol.
Args:
results (list): Testing results of the dataset.
logger (logging.Logger | str | None): Logger used for printing
related information during evaluation. Default: None.
i... | https://github.com/open-mmlab/mmsegmentation/issues/89 | Traceback (most recent call last):
File "tools/test.py", line 142, in <module>
main()
File "tools/test.py", line 120, in main
outputs = single_gpu_test(model, data_loader, args.show, args.show_dir)
File "/root/code/mmsegmentation/mmseg/apis/test.py", line 62, in single_gpu_test
out_file=out_file)
File "/root/code/mmseg... | ValueError |
def slide_inference(self, img, img_meta, rescale):
"""Inference by sliding-window with overlap."""
h_stride, w_stride = self.test_cfg.stride
h_crop, w_crop = self.test_cfg.crop_size
batch_size, _, h_img, w_img = img.size()
assert h_crop <= h_img and w_crop <= w_img, (
"crop size should not ... | def slide_inference(self, img, img_meta, rescale):
"""Inference by sliding-window with overlap."""
h_stride, w_stride = self.test_cfg.stride
h_crop, w_crop = self.test_cfg.crop_size
batch_size, _, h_img, w_img = img.size()
assert h_crop <= h_img and w_crop <= w_img, (
"crop size should not ... | https://github.com/open-mmlab/mmsegmentation/issues/89 | Traceback (most recent call last):
File "tools/test.py", line 142, in <module>
main()
File "tools/test.py", line 120, in main
outputs = single_gpu_test(model, data_loader, args.show, args.show_dir)
File "/root/code/mmsegmentation/mmseg/apis/test.py", line 62, in single_gpu_test
out_file=out_file)
File "/root/code/mmseg... | ValueError |
def parse_args():
parser = argparse.ArgumentParser(description="Train a segmentor")
parser.add_argument("config", help="train config file path")
parser.add_argument("--work-dir", help="the dir to save logs and models")
parser.add_argument("--load-from", help="the checkpoint file to load weights from")
... | def parse_args():
parser = argparse.ArgumentParser(description="Train a segmentor")
parser.add_argument("config", help="train config file path")
parser.add_argument("--work_dir", help="the dir to save logs and models")
parser.add_argument("--load-from", help="the checkpoint file to load weights from")
... | https://github.com/open-mmlab/mmsegmentation/issues/89 | Traceback (most recent call last):
File "tools/test.py", line 142, in <module>
main()
File "tools/test.py", line 120, in main
outputs = single_gpu_test(model, data_loader, args.show, args.show_dir)
File "/root/code/mmsegmentation/mmseg/apis/test.py", line 62, in single_gpu_test
out_file=out_file)
File "/root/code/mmseg... | ValueError |
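Note: part of the fix above renames --work_dir to --work-dir; argparse turns dashes in long option names into underscores for the namespace attribute, so the dashed form still surfaces as args.work_dir while matching the project's other option spellings. A minimal sketch:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--work-dir", help="the dir to save logs and models")
    args = parser.parse_args(["--work-dir", "/tmp/run1"])
    print(args.work_dir)   # dashes become underscores in the namespace attribute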
def _load_config(self) -> None:
"""Load config, monitors, alerters and loggers."""
config = EnvironmentAwareConfigParser()
if not self._config_file.exists():
raise RuntimeError(
"Configuration file {} does not exist".format(self._config_file)
)
config.read(self._config_file)... | def _load_config(self) -> None:
"""Load config, monitors, alerters and loggers."""
config = EnvironmentAwareConfigParser()
if not self._config_file.exists():
raise RuntimeError(
"Configuration file {} does not exist".format(self._config_file)
)
config.read(self._config_file)... | https://github.com/jamesoff/simplemonitor/issues/617 | % simplemonitor -t
[...]
2020-09-10 10:12:46 INFO (simplemonitor) Starting remote listener thread (allowing pickle data)
Traceback (most recent call last):
File "/root/.local/share/virtualenvs/sm-pip-9yYbf1A2/bin/simplemonitor", line 11, in <module>
load_entry_point('simplemonitor', 'console_scripts', 'simplemonito... | OSError |
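Note: configparser.read silently ignores missing files, so the after_merge loader checks for the file first and raises with a clear message instead of failing later with an empty config. A minimal sketch of the pattern:

    from configparser import ConfigParser
    from pathlib import Path

    config_file = Path("monitors.ini")   # placeholder path
    if not config_file.exists():
        raise RuntimeError("Configuration file {} does not exist".format(config_file))

    config = ConfigParser()
    config.read(config_file)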
def run(self) -> None:
self._create_pid_file()
self._start_network_thread()
module_logger.info(
"=== Starting... (loop runs every %ds) Hit ^C to stop", self.interval
)
loop = True
loops = self._max_loops
heartbeat = True
while loop:
try:
if loops > 0:
... | def run(self) -> None:
self._create_pid_file()
module_logger.info(
"=== Starting... (loop runs every %ds) Hit ^C to stop", self.interval
)
loop = True
loops = self._max_loops
heartbeat = True
while loop:
try:
if loops > 0:
loops -= 1
... | https://github.com/jamesoff/simplemonitor/issues/617 | % simplemonitor -t
[...]
2020-09-10 10:12:46 INFO (simplemonitor) Starting remote listener thread (allowing pickle data)
Traceback (most recent call last):
File "/root/.local/share/virtualenvs/sm-pip-9yYbf1A2/bin/simplemonitor", line 11, in <module>
load_entry_point('simplemonitor', 'console_scripts', 'simplemonito... | OSError |
def log_result(self, logger: Logger) -> None:
"""Use the given logger object to log our state."""
logger.check_dependencies(self.failed + self.still_failing + self.skipped)
with logger:
for key, monitor in self.monitors.items():
if check_group_match(monitor.group, logger.groups):
... | def log_result(self, logger: Logger) -> None:
"""Use the given logger object to log our state."""
logger.check_dependencies(self.failed + self.still_failing + self.skipped)
with logger:
for key, monitor in self.monitors.items():
if check_group_match(monitor.group, logger.groups):
... | https://github.com/jamesoff/simplemonitor/issues/623 | 2020-09-15 23:30:11 ERROR (simplemonitor) Caught unhandled exception during main loop
Traceback (most recent call last):
File "/root/monitor/simplemonitor/simplemonitor/simplemonitor.py", line 766, in run
self.run_loop()
File "/root/monitor/simplemonitor/simplemonitor/simplemonitor.py", line 735, in run_loop
self.do... | RuntimeError |
def do_alert(self, alerter: Alerter) -> None:
"""Use the given alerter object to send an alert, if needed."""
alerter.check_dependencies(self.failed + self.still_failing + self.skipped)
for name, this_monitor in list(self.monitors.items()):
# Don't generate alerts for monitors which want it done rem... | def do_alert(self, alerter: Alerter) -> None:
"""Use the given alerter object to send an alert, if needed."""
alerter.check_dependencies(self.failed + self.still_failing + self.skipped)
for name, this_monitor in list(self.monitors.items()):
# Don't generate alerts for monitors which want it done rem... | https://github.com/jamesoff/simplemonitor/issues/623 | 2020-09-15 23:30:11 ERROR (simplemonitor) Caught unhandled exception during main loop
Traceback (most recent call last):
File "/root/monitor/simplemonitor/simplemonitor/simplemonitor.py", line 766, in run
self.run_loop()
File "/root/monitor/simplemonitor/simplemonitor/simplemonitor.py", line 735, in run_loop
self.do... | RuntimeError |
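Note: the after_merge do_alert iterates over list(self.monitors.items()); snapshotting like this avoids the classic "dictionary changed size during iteration" RuntimeError if the dict is mutated while results are processed. A minimal sketch of the difference (illustration only):

    monitors = {"a": 1, "b": 2}
    remote_results = {"c": 3}

    # Iterating a snapshot lets the dict be mutated safely inside the loop;
    # iterating monitors.items() directly while inserting would raise
    # "RuntimeError: dictionary changed size during iteration".
    for name, state in list(monitors.items()):
        monitors.update(remote_results)

    print(sorted(monitors))   # ['a', 'b', 'c']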
def __init__(self, allow_pickle=True):
"""Main class turn on."""
self.allow_pickle = allow_pickle
self.monitors = {}
self.failed = []
self.still_failing = []
self.skipped = []
self.warning = []
self.remote_monitors = {}
self.loggers = {}
self.alerters = {}
try:
sign... | def __init__(self, allow_pickle=True):
"""Main class turn on."""
self.allow_pickle = allow_pickle
self.monitors = {}
self.failed = []
self.still_failing = []
self.skipped = []
self.warning = []
self.remote_monitors = {}
self.loggers = {}
self.alerters = {}
try:
sign... | https://github.com/jamesoff/simplemonitor/issues/216 | more monitors.ini
[monitor]
interval=60
[localhost]
type=host
host=localhost
tolerance=2
python monitor.py -v
Warning: --verbose is deprecated; use --log-level=info
2019-01-18 13:23:28 INFO (simplemonitor) === SimpleMonitor v1.7
2019-01-18 13:23:28 INFO (simplemonitor) Loading main config from monitors.ini
20... | AttributeError |
def makeYbus(baseMVA, bus, branch):
"""Builds the bus admittance matrix and branch admittance matrices.
Returns the full bus admittance matrix (i.e. for all buses) and the
matrices C{Yf} and C{Yt} which, when multiplied by a complex voltage
vector, yield the vector currents injected into each line from... | def makeYbus(baseMVA, bus, branch):
"""Builds the bus admittance matrix and branch admittance matrices.
Returns the full bus admittance matrix (i.e. for all buses) and the
matrices C{Yf} and C{Yt} which, when multiplied by a complex voltage
vector, yield the vector currents injected into each line from... | https://github.com/e2nIEE/pandapower/issues/780 | import pandapower as pp
import pandapower.networks as ppnw
net = ppnw.case9()
b = pp.create_bus(net, vn_kv=100)
pp.create_ext_grid(net, b)
1
pp.create_load(net, b, p_mw=10)
3
pp.runpp(net)
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "/Users/michael/Dropbox/mac_git/pandapower/pandapower/r... | ValueError |
def db_to_form_package_schema():
schema = default_package_schema()
schema.update(
{
"tags": {
"__extras": [
ckan.lib.navl.validators.keep_extras,
ckan.logic.converters.free_tags_only,
]
},
}
)
... | def db_to_form_package_schema():
schema = default_package_schema()
schema.update(
{
"tags": {
"__extras": [
ckan.lib.navl.validators.keep_extras,
ckan.logic.converters.free_tags_only,
]
},
}
)
... | https://github.com/ckan/ckan/issues/396 | nosetests --ckan --with-pylons=test-core.ini ckan ξ² ξ idatasetform-fixes
.........................................................................................................................................................S.S.S.S.S.S..............................................................F.F........... | KeyError |
def db_to_form_package_schema():
schema = default_package_schema()
schema.update(
{
"tags": {
"__extras": [
ckan.lib.navl.validators.keep_extras,
ckan.logic.converters.free_tags_only,
]
},
}
)
... | def db_to_form_package_schema():
schema = default_package_schema()
schema.update(
{
"tags": {
"__extras": [
ckan.lib.navl.validators.keep_extras,
ckan.logic.converters.free_tags_only,
]
},
}
)
... | https://github.com/ckan/ckan/issues/396 | nosetests --ckan --with-pylons=test-core.ini ckan ξ² ξ idatasetform-fixes
.........................................................................................................................................................S.S.S.S.S.S..............................................................F.F........... | KeyError |
def post(self, sketch_id):
"""Handles POST request to the resource.
Handler for /api/v1/sketches/:sketch_id/explore/
Args:
sketch_id: Integer primary key for a sketch database model
Returns:
JSON with list of matched events
"""
sketch = Sketch.query.get_with_acl(sketch_id)
... | def post(self, sketch_id):
"""Handles POST request to the resource.
Handler for /api/v1/sketches/:sketch_id/explore/
Args:
sketch_id: Integer primary key for a sketch database model
Returns:
JSON with list of matched events
"""
sketch = Sketch.query.get_with_acl(sketch_id)
... | https://github.com/google/timesketch/issues/1564 | [2021-01-21 09:57:16,813] celery.worker.strategy/INFO Received task: timesketch.lib.tasks.run_csv_jsonl[34b8f46651b94a8d8ad9e29bb06a147c]
[2021-01-21 09:57:16,819] timesketch.tasks/INFO Index timeline [vol timeline] to index [34b8f46651b94a8d8ad9e29bb06a147c] (source: jsonl)
[2021-01-21 09:58:14,763] timesketch.elastic... | urllib3.exceptions.ReadTimeoutError |
def __init__(self, host="127.0.0.1", port=9200):
"""Create a Elasticsearch client."""
super().__init__()
self._error_container = {}
self.user = current_app.config.get("ELASTIC_USER", "user")
self.password = current_app.config.get("ELASTIC_PASSWORD", "pass")
self.ssl = current_app.config.get("EL... | def __init__(self, host="127.0.0.1", port=9200):
"""Create a Elasticsearch client."""
super().__init__()
self._error_container = {}
self.user = current_app.config.get("ELASTIC_USER", "user")
self.password = current_app.config.get("ELASTIC_PASSWORD", "pass")
self.ssl = current_app.config.get("EL... | https://github.com/google/timesketch/issues/1564 | [2021-01-21 09:57:16,813] celery.worker.strategy/INFO Received task: timesketch.lib.tasks.run_csv_jsonl[34b8f46651b94a8d8ad9e29bb06a147c]
[2021-01-21 09:57:16,819] timesketch.tasks/INFO Index timeline [vol timeline] to index [34b8f46651b94a8d8ad9e29bb06a147c] (source: jsonl)
[2021-01-21 09:58:14,763] timesketch.elastic... | urllib3.exceptions.ReadTimeoutError |
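Note: the ReadTimeoutError means the Elasticsearch client gave up waiting for a slow bulk request; a common mitigation is to raise the client timeout and allow retries. A minimal sketch, assuming the elasticsearch-py 7.x client where timeout, max_retries and retry_on_timeout are accepted constructor options (the values here are made up, not timesketch's configuration):

    from elasticsearch import Elasticsearch

    es = Elasticsearch(
        [{"host": "127.0.0.1", "port": 9200}],
        timeout=120,            # wait longer for slow bulk indexing
        max_retries=3,
        retry_on_timeout=True,  # retry instead of surfacing ReadTimeoutError
    )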
def flush_queued_events(self, retry_count=0):
"""Flush all queued events.
Returns:
dict: A dict object that contains the number of events
that were sent to Elastic as well as information
on whether there were any errors, and what the
details of these errors if any.
... | def flush_queued_events(self, retry_count=0):
"""Flush all queued events.
Returns:
dict: A dict object that contains the number of events
that were sent to Elastic as well as information
on whether there were any errors, and what the
details of these errors if any.
... | https://github.com/google/timesketch/issues/1564 | [2021-01-21 09:57:16,813] celery.worker.strategy/INFO Received task: timesketch.lib.tasks.run_csv_jsonl[34b8f46651b94a8d8ad9e29bb06a147c]
[2021-01-21 09:57:16,819] timesketch.tasks/INFO Index timeline [vol timeline] to index [34b8f46651b94a8d8ad9e29bb06a147c] (source: jsonl)
[2021-01-21 09:58:14,763] timesketch.elastic... | urllib3.exceptions.ReadTimeoutError |
def run(self, name, index, username):
"""Create the SearchIndex."""
es = ElasticsearchDataStore(
host=current_app.config["ELASTIC_HOST"], port=current_app.config["ELASTIC_PORT"]
)
user = User.query.filter_by(username=username).first()
if not user:
sys.stderr.write("User does not exis... | def run(self, name, index, username):
"""Create the SearchIndex."""
es = ElasticsearchDataStore(
host=current_app.config["ELASTIC_HOST"], port=current_app.config["ELASTIC_PORT"]
)
user = User.query.filter_by(username=username).first()
if not user:
sys.stderr.write("User does not exis... | https://github.com/google/timesketch/issues/1093 | tsctl import -f <timeline>.jsonl -u <username>
Traceback (most recent call last):
File "/usr/lib/python3.6/encodings/utf_8.py", line 16, in decode
return codecs.utf_8_decode(input, errors, True)
TypeError: a bytes-like object is required, not 'NoneType'
The above exception was the direct cause of the following excepti... | TypeError |
def run(self, file_path, sketch_id, username, timeline_name):
"""This is the run method."""
file_path = os.path.realpath(file_path)
file_path_no_extension, extension = os.path.splitext(file_path)
extension = extension.lstrip(".")
filename = os.path.basename(file_path_no_extension)
supported_ex... | def run(self, file_path, sketch_id, username, timeline_name):
"""This is the run method."""
file_path = os.path.realpath(file_path)
file_path_no_extension, extension = os.path.splitext(file_path)
extension = extension.lstrip(".")
filename = os.path.basename(file_path_no_extension)
supported_ex... | https://github.com/google/timesketch/issues/1093 | tsctl import -f <timeline>.jsonl -u <username>
Traceback (most recent call last):
File "/usr/lib/python3.6/encodings/utf_8.py", line 16, in decode
return codecs.utf_8_decode(input, errors, True)
TypeError: a bytes-like object is required, not 'NoneType'
The above exception was the direct cause of the following excepti... | TypeError |
def add_file(self, filepath, delimiter=","):
"""Add a CSV, JSONL or a PLASO file to the buffer.
Args:
filepath: the path to the file to add.
delimiter: if this is a CSV file then a delimiter can be defined.
Raises:
TypeError: if the entry does not fulfill requirements.
"""
... | def add_file(self, filepath, delimiter=","):
"""Add a CSV, JSONL or a PLASO file to the buffer.
Args:
filepath: the path to the file to add.
delimiter: if this is a CSV file then a delimiter can be defined.
Raises:
TypeError: if the entry does not fulfill requirements.
"""
... | https://github.com/google/timesketch/issues/1017 | Traceback (most recent call last):
File "...lib/python2.7/site-packages/timesketch/lib/tasks.py", line 467, in run_csv_jsonl
for event in read_and_validate(source_file_path):
File ".../lib/python2.7/site-packages/timesketch/lib/utils.py", line 81, in read_and_validate_csv
for row in reader:
File "/usr/lib/python2.7/csv... | UnicodeEncodeError |
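Note: the UnicodeEncodeError comes from feeding Python 2's csv module text that is not plain ASCII; on Python 3 the usual cure is to open the file in text mode with an explicit encoding and error policy before handing it to csv. A minimal sketch (illustration only, not timesketch's exact fix; the filename is a placeholder):

    import csv

    with open("timeline.csv", "r", encoding="utf-8", errors="replace", newline="") as fh:
        reader = csv.DictReader(fh)
        for row in reader:
            # Rows arrive as already-decoded text, so no implicit ASCII
            # encode/decode step can blow up on non-ASCII characters.
            pass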