in_source_id stringlengths 13 58 | issue stringlengths 3 241k | before_files listlengths 0 3 | after_files listlengths 0 3 | pr_diff stringlengths 109 107M ⌀ |
|---|---|---|---|---|
carpentries__amy-696 | Can't assign person to something when lookup fails
This is direct cause for this error:
```
Internal Server Error: /workshops/request/65/assign
Traceback (most recent call last):
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/core/handlers/base.py", line 132, in get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/contrib/auth/decorators.py", line 22, in _wrapped_view
return view_func(request, *args, **kwargs)
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/contrib/auth/decorators.py", line 22, in _wrapped_view
return view_func(request, *args, **kwargs)
File "./workshops/views.py", line 1989, in eventrequest_assign
assign(request, event_req, person_id)
File "./workshops/util.py", line 737, in assign
person = Person.objects.get(pk=person_id)
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/db/models/manager.py", line 127, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/db/models/query.py", line 325, in get
clone = self.filter(*args, **kwargs)
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/db/models/query.py", line 679, in filter
return self._filter_or_exclude(False, *args, **kwargs)
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/db/models/query.py", line 697, in _filter_or_exclude
clone.query.add_q(Q(*args, **kwargs))
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/db/models/sql/query.py", line 1310, in add_q
clause, require_inner = self._add_q(where_part, self.used_aliases)
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/db/models/sql/query.py", line 1338, in _add_q
allow_joins=allow_joins, split_subq=split_subq,
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/db/models/sql/query.py", line 1209, in build_filter
condition = self.build_lookup(lookups, col, value)
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/db/models/sql/query.py", line 1102, in build_lookup
return final_lookup(lhs, rhs)
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/db/models/lookups.py", line 101, in __init__
self.rhs = self.get_prep_lookup()
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/db/models/lookups.py", line 139, in get_prep_lookup
return self.lhs.output_field.get_prep_lookup(self.lookup_name, self.rhs)
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/db/models/fields/__init__.py", line 727, in get_prep_lookup
return self.get_prep_value(value)
File "/home/amy/amy_site/venv/lib/python3.4/site-packages/django/db/models/fields/__init__.py", line 985, in get_prep_value
return int(value)
ValueError: invalid literal for int() with base 10: ''
```
There needs to be a "get-or-404" mechanism in `workshops.util.assign` - it will prevent this error.
| [
{
"content": "# coding: utf-8\nfrom collections import namedtuple, defaultdict\nimport csv\nimport datetime\nfrom itertools import chain\nimport re\nimport yaml\n\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.core.paginator import (\n EmptyPage, PageNotAnInteger, Paginator as DjangoPagi... | [
{
"content": "# coding: utf-8\nfrom collections import namedtuple, defaultdict\nimport csv\nimport datetime\nfrom itertools import chain\nimport re\nimport yaml\n\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.core.paginator import (\n EmptyPage, PageNotAnInteger, Paginator as DjangoPagi... | diff --git a/workshops/test/test_util.py b/workshops/test/test_util.py
index d4e33f889..ad4d06371 100644
--- a/workshops/test/test_util.py
+++ b/workshops/test/test_util.py
@@ -5,8 +5,9 @@
from django.contrib.auth.models import Group
from django.contrib.sessions.serializers import JSONSerializer
-from django.test import TestCase, RequestFactory
from django.core.urlresolvers import reverse
+from django.http import Http404
+from django.test import TestCase, RequestFactory
from ..models import Host, Event, Role, Person, Task, Badge, Award
from ..util import (
@@ -23,6 +24,7 @@
create_username,
InternalError,
Paginator,
+ assign,
)
from .base import TestBase
@@ -1008,3 +1010,62 @@ def test_long_no_breaks(self):
# None is a break, it appears as '...' in the paginator widget
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17]
)
+
+
+class TestAssignUtil(TestCase):
+ def setUp(self):
+ """Set up RequestFactory for making fast fake requests."""
+ Person.objects.create_user('test_user', 'User', 'Test', 'user@test')
+ self.factory = RequestFactory()
+ self.event = Event.objects.create(
+ slug='event-for-assignment', host=Host.objects.first())
+
+ def test_no_integer_pk(self):
+ """Ensure we fail with 404 when person PK is string, not integer."""
+ tests = [
+ (self.factory.get('/'), 'alpha'),
+ (self.factory.post('/', {'person_1': 'alpha'}), None),
+ ]
+ for request, person_id in tests:
+ with self.subTest(method=request.method):
+ with self.assertRaises(Http404):
+ assign(request, self.event, person_id=person_id)
+
+ # just reset the link, for safety sake
+ self.event.assigned_to = None
+ self.event.save()
+
+ def test_assigning(self):
+ """Ensure that with assignment is set correctly."""
+ first_person = Person.objects.first()
+ tests = [
+ (self.factory.get('/'), first_person.pk),
+ (self.factory.post('/', {'person_1': first_person.pk}), None),
+ ]
+ for request, person_id in tests:
+ with self.subTest(method=request.method):
+ # just reset the link, for safety sake
+ self.event.assigned_to = None
+ self.event.save()
+
+ assign(request, self.event, person_id=person_id)
+ self.event.refresh_from_db()
+ self.assertEqual(self.event.assigned_to, first_person)
+
+ def test_removing_assignment(self):
+ """Ensure that with person_id=None, the assignment is removed."""
+ first_person = Person.objects.first()
+ tests = [
+ (self.factory.get('/'), None),
+ (self.factory.post('/'), None),
+ ]
+ for request, person_id in tests:
+ with self.subTest(method=request.method):
+ # just re-set the link to first person, for safety sake
+ self.event.assigned_to = first_person
+ self.event.save()
+
+ assign(request, self.event, person_id=person_id)
+
+ self.event.refresh_from_db()
+ self.assertEqual(self.event.assigned_to, None)
diff --git a/workshops/util.py b/workshops/util.py
index 3aee0556c..ad5d1d7dd 100644
--- a/workshops/util.py
+++ b/workshops/util.py
@@ -762,5 +762,5 @@ def assign(request, obj, person_id):
obj.save()
- except Person.DoesNotExist:
+ except (Person.DoesNotExist, ValueError):
raise Http404("No person found matching the query.")
|
mozilla__telemetry-analysis-service-474 | Create view with jobs histories
There have been several jobs failing silently. Those jobs will soon generate alerts (#201) but it would still be convenient to have a master view in the dashboard that shows the history, and their status, of all scheduled jobs. Furthermore, every user should be able to see the history for their own jobs.
| [
{
"content": "# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this\n# file, you can obtain one at http://mozilla.org/MPL/2.0/.\nfrom datetime import timedelta\n\nimport urlman\nfrom autorepr import autorepr, autostr\nfrom dj... | [
{
"content": "# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this\n# file, you can obtain one at http://mozilla.org/MPL/2.0/.\nfrom datetime import timedelta\n\nimport urlman\nfrom autorepr import autorepr, autostr\nfrom dj... | diff --git a/atmo/jobs/models.py b/atmo/jobs/models.py
index 126aca0a..fb638199 100644
--- a/atmo/jobs/models.py
+++ b/atmo/jobs/models.py
@@ -345,6 +345,7 @@ class SparkJobRun(EditedAtModel):
class Meta:
get_latest_by = 'created_at'
+ ordering = ['-created_at']
__str__ = autostr('{self.jobflow_id}')
diff --git a/atmo/templates/atmo/jobs/detail.html b/atmo/templates/atmo/jobs/detail.html
index 51763c29..dc74bdfb 100644
--- a/atmo/templates/atmo/jobs/detail.html
+++ b/atmo/templates/atmo/jobs/detail.html
@@ -36,6 +36,7 @@ <h2>
<a href="#notebook" class="btn btn-sm btn-default active" aria-controls="notebook" role="tab" data-toggle="tab">Notebook</a>
<a href="#results" class="btn btn-sm btn-default" aria-controls="results" role="tab" data-toggle="tab">Results</a>
<a href="#logs" class="btn btn-sm btn-default" aria-controls="logs" role="tab" data-toggle="tab">Logs</a>
+ <a href="#runs" class="btn btn-sm btn-default" aria-controls="runs" role="tab" data-toggle="tab">Runs</a>
</div>
<div class="btn-group"
role="group"
@@ -163,6 +164,38 @@ <h4>
{% endfor %}
</ul>
</div>
+ <div role="tabpanel" class="tab-pane" id="runs">
+ {% if spark_job.has_never_run %}
+ <p>No job run history yet.</p>
+ {% else %}
+ <table class="table table-striped">
+ <thead>
+ <tr>
+ <th>Jobflow ID</th>
+ <th>EMR version</th>
+ <th>Cluster size</th>
+ <th>Last status</th>
+ <th>Scheduled</th>
+ <th>Ran</th>
+ <th>Finished</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for run in spark_job.runs.all %}
+ <tr>
+ <td><a href="https://{{ settings.AWS_CONFIG.AWS_REGION }}.console.aws.amazon.com/elasticmapreduce/home?region={{ settings.AWS_CONFIG.AWS_REGION }}#cluster-details:{{ run.jobflow_id }}">{{ run.jobflow_id }}</a></td>
+ <td>{{ run.emr_release_version }}</td>
+ <td>{{ run.size|default:"n/a" }}</td>
+ <td>{{ run.status }}</td>
+ <td>{{ run.scheduled_date|default:"n/a" }}</td>
+ <td>{{ run.run_date|default:"n/a" }}</td>
+ <td>{{ run.finished_at|default:"n/a" }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+ {% endif %}
+ </div>
</div>
</div>
<div class="col-sm-3">
|
sanic-org__sanic-878 | 0.5.5 release request
Because 0.5.4 has actual protocol parsing problem (#755) I request to quickly release 0.5.5.
It causes actual request loss and unhandlable 400 errors for the sanic users. (unless they make local patch for sanic)
| [
{
"content": "from sanic.app import Sanic\nfrom sanic.blueprints import Blueprint\n\n__version__ = '0.5.4'\n\n__all__ = ['Sanic', 'Blueprint']\n",
"path": "sanic/__init__.py"
}
] | [
{
"content": "from sanic.app import Sanic\nfrom sanic.blueprints import Blueprint\n\n__version__ = '0.6.0'\n\n__all__ = ['Sanic', 'Blueprint']\n",
"path": "sanic/__init__.py"
}
] | diff --git a/sanic/__init__.py b/sanic/__init__.py
index 4cc0710ff7..8f35a28367 100644
--- a/sanic/__init__.py
+++ b/sanic/__init__.py
@@ -1,6 +1,6 @@
from sanic.app import Sanic
from sanic.blueprints import Blueprint
-__version__ = '0.5.4'
+__version__ = '0.6.0'
__all__ = ['Sanic', 'Blueprint']
|
pypa__setuptools-4184 | [BUG] "'extras_require' must be a dictionary..." regression
### setuptools version
setuptools==67.0.0
### Python version
Python 3.10
### OS
Linux / Ubuntu
### Additional environment information
_No response_
### Description
With latest setuptools I am unable to use a package (btchip-python) as input to pip-compile requirements for hash generation. It errors with:
> error in btchip-python setup command: 'extras_require' must be a dictionary whose values are strings or lists of strings containing valid project/version requirement specifiers.
If I downgrade setuptools to a version I know works, 65.5.0, the package works again.
The problem part of the project's setup.py file is:
```
extras_require = {
'smartcard': [ 'python-pyscard>=1.6.12-4build1' ]
},
```
It is triggered in the following command pip-compile does:
> python3 setup.py egg_info
### Expected behavior
Unless there is an obvious reason that setuptools is rejecting values it accepted before, I expect it to continue to accept them and no do breaking changes.
### How to Reproduce
The simplest reproduction case is taking the erroring command from pip-compile and running it in a local copy of the package.
1. pip3 install setuptools==67.0.0
2. git clone https://github.com/LedgerHQ/btchip-python.git
3. cd btchip-python
4. python3 setup.py egg_info
5. Observe that it errors with the `extras_require` complaint.
Then with the older setuptools.
1. pip3 install setuptools==65.5.0
2. python3 setup.py egg_info
3. Observe that it generates the eggy stuff.
### Output
```console
vboxuser@Ubuntu2204:~/_src/btchip-python$ python3 setup.py egg_info
error in btchip-python setup command: 'extras_require' must be a dictionary whose values are strings or lists of strings containing valid project/version requirement specifiers.
vboxuser@Ubuntu2204:~/_src/btchip-python$ pip3 show setuptools
Name: setuptools
Version: 67.0.0
Summary: Easily download, build, install, upgrade, and uninstall Python packages
Home-page: https://github.com/pypa/setuptools
Author: Python Packaging Authority
Author-email: distutils-sig@python.org
License:
Location: /home/vboxuser/.local/lib/python3.10/site-packages
Requires:
Required-by: pip-tools
vboxuser@Ubuntu2204:~/_src/btchip-python$ pip3 install setuptools==65.5.0
Defaulting to user installation because normal site-packages is not writeable
Collecting setuptools==65.5.0
Downloading setuptools-65.5.0-py3-none-any.whl (1.2 MB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 1.2/1.2 MB 1.4 MB/s eta 0:00:00
Installing collected packages: setuptools
Attempting uninstall: setuptools
Found existing installation: setuptools 67.0.0
Uninstalling setuptools-67.0.0:
Successfully uninstalled setuptools-67.0.0
Successfully installed setuptools-65.5.0
vboxuser@Ubuntu2204:~/_src/btchip-python$ which pip3
/home/vboxuser/.local/bin/pip3
vboxuser@Ubuntu2204:~/_src/btchip-python$ python3 setup.py egg_info
running egg_info
creating btchip_python.egg-info
writing btchip_python.egg-info/PKG-INFO
writing dependency_links to btchip_python.egg-info/dependency_links.txt
writing requirements to btchip_python.egg-info/requires.txt
writing top-level names to btchip_python.egg-info/top_level.txt
writing manifest file 'btchip_python.egg-info/SOURCES.txt'
reading manifest file 'btchip_python.egg-info/SOURCES.txt'
reading manifest template 'MANIFEST.in'
adding license file 'LICENSE'
writing manifest file 'btchip_python.egg-info/SOURCES.txt'
vboxuser@Ubuntu2204:~/_src/btchip-python$
```
| [
{
"content": "\"\"\"Translation layer between pyproject config and setuptools distribution and\nmetadata objects.\n\nThe distribution and metadata objects are modeled after (an old version of)\ncore metadata, therefore configs in the format specified for ``pyproject.toml``\nneed to be processed before being app... | [
{
"content": "\"\"\"Translation layer between pyproject config and setuptools distribution and\nmetadata objects.\n\nThe distribution and metadata objects are modeled after (an old version of)\ncore metadata, therefore configs in the format specified for ``pyproject.toml``\nneed to be processed before being app... | diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index b562f91759..4aec5f1377 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -409,7 +409,7 @@ def _acessor(obj):
"scripts": {},
"gui-scripts": {},
"dependencies": [],
- "optional-dependencies": [],
+ "optional-dependencies": {},
}
|
sanic-org__sanic-1045 | 0.6.1 release to PyPi
Hey folks,
There's been a bunch of substantive changes in the past few months; I think it warrants a release of 0.6.1 (or 0.7, considering there may be large changes in PRs like #939). Any chance we could get a new candidate uploaded to PyPi?
If there's a better place to ask this, I'm happy to head there.
| [
{
"content": "from sanic.app import Sanic\nfrom sanic.blueprints import Blueprint\n\n__version__ = '0.6.0'\n\n__all__ = ['Sanic', 'Blueprint']\n",
"path": "sanic/__init__.py"
}
] | [
{
"content": "from sanic.app import Sanic\nfrom sanic.blueprints import Blueprint\n\n__version__ = '0.7.0'\n\n__all__ = ['Sanic', 'Blueprint']\n",
"path": "sanic/__init__.py"
}
] | diff --git a/sanic/__init__.py b/sanic/__init__.py
index 8f35a28367..78bc7bd9f5 100644
--- a/sanic/__init__.py
+++ b/sanic/__init__.py
@@ -1,6 +1,6 @@
from sanic.app import Sanic
from sanic.blueprints import Blueprint
-__version__ = '0.6.0'
+__version__ = '0.7.0'
__all__ = ['Sanic', 'Blueprint']
|
biopython__biopython-3366 | `Bio.Phylo.write` in format `nexus` does not export comments.
### Setup
I am reporting a problem with Biopython version 1.77, Python version 3.7, and operating
system Ubuntu 16.04 as follows:
```python
from io import StringIO
from Bio import Phylo
t = Phylo.read(StringIO("((A,B),C);"), 'newick')
for ni,n in enumerate(t.get_terminals()):
n.comment = f"[&node_number={ni}]"
out = StringIO()
Phylo.write(t, out, "nexus")
print(out.getvalue())
```
The output is
```
#NEXUS
Begin Taxa;
Dimensions NTax=3;
TaxLabels A B C;
End;
Begin Trees;
Tree tree1=((A:0.00000,B:0.00000):0.00000,C:0.00000):0.00000;
End;
```
missing the comments I attached to the nodes.
### Expected behaviour
The output should look like this:
```
Begin Taxa;
Dimensions NTax=3;
TaxLabels A B C;
End;
Begin Trees;
Tree tree1=((A[&node_number=0]:0.00000,B[&node_number=1]:0.00000):0.00000,C[&node_number=2]:0.00000):0.00000;
End;
```
In fact, doing the reverse and parsing this tree with `Bio.Phylo.read` reads the comments correctly.
```python
nexus_in = StringIO(
"""
Begin Taxa;
Dimensions NTax=3;
TaxLabels A B C;
End;
Begin Trees;
Tree tree1=((A[&node_number=0]:0.00000,B[&node_number=1]:0.00000):0.00000,C[&node_number=2]:0.00000):0.00000;
End;
""")
t2 = Phylo.read(nexus_in, 'nexus')
for n in t2.get_terminals():
print(n.name, n.comment)
```
has the expected output
```
A [&node_number=0]
B [&node_number=1]
C [&node_number=2]
```
### Actual behaviour
`Bio.Phylo.write` in `nexus` format ignores comments, while `Bio.Phylo.read` correctly parses them from the input.
### Steps to reproduce
The above code-snippets are sufficient to reproduce the unexpected behaviour.
| [
{
"content": "# Copyright (C) 2009 by Eric Talevich (eric.talevich@gmail.com)\n# Based on Bio.Nexus, copyright 2005-2008 by Frank Kauff & Cymon J. Cox.\n# All rights reserved.\n#\n# This file is part of the Biopython distribution and governed by your\n# choice of the \"Biopython License Agreement\" or the \"BSD... | [
{
"content": "# Copyright (C) 2009 by Eric Talevich (eric.talevich@gmail.com)\n# Based on Bio.Nexus, copyright 2005-2008 by Frank Kauff & Cymon J. Cox.\n# All rights reserved.\n#\n# This file is part of the Biopython distribution and governed by your\n# choice of the \"Biopython License Agreement\" or the \"BSD... | diff --git a/Bio/Phylo/NewickIO.py b/Bio/Phylo/NewickIO.py
index 99b77816756..b9af509cc29 100644
--- a/Bio/Phylo/NewickIO.py
+++ b/Bio/Phylo/NewickIO.py
@@ -84,7 +84,7 @@ def _format_comment(text):
def _get_comment(clade):
try:
- comment = clade.coment
+ comment = clade.comment
except AttributeError:
pass
else:
|
sanic-org__sanic-1292 | New release on Pypi ?
Hello,
I was looking for a tool to autoreload my code when I develop and I found this commit : https://github.com/channelcat/sanic/commit/52c2a8484e6aa5fa13aaade49e1f2597dd006e15
So it seems Sanic already integrates it since December 07, 2017. But the the latest version on Pypi dates from the day before (https://github.com/channelcat/sanic/commit/1ea3ab7fe8ab03a6ddf4d75a3de8cb719f4c584c) : https://pypi.org/project/Sanic/#history
Is-it possible to release a new version on Pypi please ? Other features (like the UUID support in routes) are also interesting :)
Thanks in advance !
| [
{
"content": "from sanic.app import Sanic\nfrom sanic.blueprints import Blueprint\n\n__version__ = '0.7.0'\n\n__all__ = ['Sanic', 'Blueprint']\n",
"path": "sanic/__init__.py"
}
] | [
{
"content": "from sanic.app import Sanic\nfrom sanic.blueprints import Blueprint\n\n__version__ = '0.8.0'\n\n__all__ = ['Sanic', 'Blueprint']\n",
"path": "sanic/__init__.py"
}
] | diff --git a/sanic/__init__.py b/sanic/__init__.py
index 78bc7bd9f5..5e6ff4daff 100644
--- a/sanic/__init__.py
+++ b/sanic/__init__.py
@@ -1,6 +1,6 @@
from sanic.app import Sanic
from sanic.blueprints import Blueprint
-__version__ = '0.7.0'
+__version__ = '0.8.0'
__all__ = ['Sanic', 'Blueprint']
|
biolab__orange3-2988 | cannot install add-ons from the menu
<!--
This is an issue template. Please fill in the relevant details in the
sections below.
Wrap code and verbatim terminal window output into triple backticks, see:
https://help.github.com/articles/basic-writing-and-formatting-syntax/#quoting-code
If you're raising an issue about an add-on (e.g. installed via
Options > Add-ons), raise an issue on the relevant add-on's issue
tracker instead. See: https://github.com/biolab?q=orange3
-->
##### Orange version
Linux last version
##### Expected behavior
getting a list of add-ons using options/add-ons
##### Actual behavior
pop up as follows:
Error encountered:
Traceback (most recent call last):
File "/home/roudenko/.local/lib/python3.5/site-packages/Orange/canvas/application/canvasmain.py", line 1625, in open_addons
if not have_install_permissions():
File "/home/roudenko/.local/lib/python3.5/site-packages/Orange/canvas/application/addons.py", line 755, in have_install_permissions
with open(fn, "w"):
FileNotFoundError: [Errno 2] No such file or directory: '/usr/lib/python3.5/site-packages/test_write_19468'
##### Steps to reproduce the behavior
I guess the pb is related to the fact that I installed Orange locally.
Indeed, I do not even have the directory /usr/lib/python3.5/site-packages
##### Additional info (worksheets, data, screenshots, ...)
Is the only way to get add-ons in this case is to install them from your git using pip?
| [
{
"content": "import sys\nimport sysconfig\nimport os\nimport logging\nimport re\nimport errno\nimport shlex\nimport subprocess\nimport itertools\nimport concurrent.futures\n\nfrom collections import namedtuple, deque\nfrom xml.sax.saxutils import escape\nfrom distutils import version\nimport urllib.request\nim... | [
{
"content": "import sys\nimport sysconfig\nimport os\nimport logging\nimport re\nimport errno\nimport shlex\nimport subprocess\nimport itertools\nimport concurrent.futures\n\nfrom collections import namedtuple, deque\nfrom xml.sax.saxutils import escape\nfrom distutils import version\nimport urllib.request\nim... | diff --git a/Orange/canvas/application/addons.py b/Orange/canvas/application/addons.py
index 63dcb9e1ee7..41af8d096e0 100644
--- a/Orange/canvas/application/addons.py
+++ b/Orange/canvas/application/addons.py
@@ -756,7 +756,7 @@ def have_install_permissions():
pass
os.remove(fn)
return True
- except PermissionError:
+ except OSError:
return False
|
vega__altair-3387 | minimum pyarrow version enforced even if pandas is installed
The error we are facing in an environment says:
```python
RuntimeError: The pyarrow package must be version 11.0.0 or greater. Found version 6.0.1
```
And is caused by these lines:
https://github.com/altair-viz/altair/blob/main/altair/utils/core.py#L591-L592
```python
# if data is specified and type is not, infer type from data
if "type" not in attrs:
if pyarrow_available() and data is not None and isinstance(data, DataFrameLike):
...
elif isinstance(data, pd.DataFrame):
# Fallback if pyarrow is not installed or if pandas is older than 1.5
```
In that particular environment pandas is installed by default and we are not able to upgrade pyarrow.
Now the altair specifications errors as the code never tries the pandas approach as it has found a pyarrow version that is too old.
| [
{
"content": "from types import ModuleType\nfrom packaging.version import Version\nfrom importlib.metadata import version as importlib_version\n\n\ndef import_vegafusion() -> ModuleType:\n min_version = \"1.5.0\"\n try:\n version = importlib_version(\"vegafusion\")\n embed_version = importli... | [
{
"content": "from types import ModuleType\nfrom packaging.version import Version\nfrom importlib.metadata import version as importlib_version\n\n\ndef import_vegafusion() -> ModuleType:\n min_version = \"1.5.0\"\n try:\n version = importlib_version(\"vegafusion\")\n embed_version = importli... | diff --git a/altair/utils/_importers.py b/altair/utils/_importers.py
index 718fa9129..b7fa8a958 100644
--- a/altair/utils/_importers.py
+++ b/altair/utils/_importers.py
@@ -93,5 +93,5 @@ def pyarrow_available() -> bool:
try:
import_pyarrow_interchange()
return True
- except ImportError:
+ except (ImportError, RuntimeError):
return False
|
opendatacube__datacube-core-1331 | Code includes Type Annotations, but they're not made available for type checking (PEP561)
**Summary**
The ODC code is fairly well annotated with [Python type hints](https://typing.readthedocs.io/en/latest/), but these type hints aren't made availble for use in downstream packages by following [PEP 561 – Distributing and Packaging Type Information | peps.python.org](https://peps.python.org/pep-0561/).
**Proposal**
Since ODC Core includes inline type hints with the code, we need to follow [packaging type information - PEP561](https://peps.python.org/pep-0561/#packaging-type-information) by adding an empty `datacube/py.typed` file, and ensuring it's distributed by adding it to `package_data` in `setup.py`.
| [
{
"content": "#!/usr/bin/env python\n\nfrom setuptools import setup, find_packages\n\ntests_require = [\n 'hypothesis',\n 'pycodestyle',\n 'pylint',\n 'pytest',\n 'pytest-cov',\n 'pytest-timeout',\n 'pytest-httpserver',\n 'moto',\n]\ndoc_require = [\n 'Sphinx',\n 'sphinx_rtd_theme'... | [
{
"content": "#!/usr/bin/env python\n\nfrom setuptools import setup, find_packages\n\ntests_require = [\n 'hypothesis',\n 'pycodestyle',\n 'pylint',\n 'pytest',\n 'pytest-cov',\n 'pytest-timeout',\n 'pytest-httpserver',\n 'moto',\n]\ndoc_require = [\n 'Sphinx',\n 'sphinx_rtd_theme'... | diff --git a/datacube/py.typed b/datacube/py.typed
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/docs/about/whats_new.rst b/docs/about/whats_new.rst
index ff1cab2c0d..1be5b7418a 100644
--- a/docs/about/whats_new.rst
+++ b/docs/about/whats_new.rst
@@ -12,6 +12,9 @@ v1.8.next
- Extend `patch_url` argument to `dc.load()` and `dc.load_data()` to Dask loading. (:pull:`1323`)
- Add `sphinx.ext.autoselectionlabel` extension to readthedoc conf to support `:ref:` command (:pull:`1325`)
- Add `pyspellcheck` for `.rst` documentation files and fix typos (:pull:`1327`)
+- Follow PEP561_ to make type hints available to other packages (:issue:`1330`)
+
+.. _PEP561: https://peps.python.org/pep-0561/
v1.8.8 (5 October 2022)
=======================
diff --git a/setup.py b/setup.py
index d315bdf69b..2721f9506a 100755
--- a/setup.py
+++ b/setup.py
@@ -82,6 +82,7 @@
),
package_data={
'': ['*.yaml', '*/*.yaml'],
+ 'datacube': ['py.typed'],
},
scripts=[],
install_requires=[
|
scikit-image__scikit-image-938 | Draw circle does not obey shape argument - v0.93
In previous version worked fine, but I just installed new version 0.93 and draw.circle is not working properly. When I apply the circle for image 1024x1024 with following arguments:
rr,cc=circle(-5.2796287128712879E+02, 1.5003712871287132E+02, 9.8910961199417170E+02, (1024,1024))
I get negative values in rr and cc arrays. Which obviously leads to errors when applied to:
img[rr,cc]=0
| [
{
"content": "# coding: utf-8\nimport numpy as np\n\n\ndef _coords_inside_image(rr, cc, shape):\n mask = (rr >= 0) & (rr < shape[0]) & (cc >= 0) & (cc < shape[1])\n return rr[mask], cc[mask]\n\n\ndef ellipse(cy, cx, yradius, xradius, shape=None):\n \"\"\"Generate coordinates of pixels within ellipse.\n... | [
{
"content": "# coding: utf-8\nimport numpy as np\n\n\ndef _coords_inside_image(rr, cc, shape):\n mask = (rr >= 0) & (rr < shape[0]) & (cc >= 0) & (cc < shape[1])\n return rr[mask], cc[mask]\n\n\ndef ellipse(cy, cx, yradius, xradius, shape=None):\n \"\"\"Generate coordinates of pixels within ellipse.\n... | diff --git a/skimage/draw/draw.py b/skimage/draw/draw.py
index cbf3ced2426..e61df40c329 100644
--- a/skimage/draw/draw.py
+++ b/skimage/draw/draw.py
@@ -60,7 +60,7 @@ def ellipse(cy, cx, yradius, xradius, shape=None):
cc += cx - xradius
if shape is not None:
- _coords_inside_image(rr, cc, shape)
+ return _coords_inside_image(rr, cc, shape)
return rr, cc
diff --git a/skimage/draw/tests/test_draw.py b/skimage/draw/tests/test_draw.py
index 2d739f0ffb8..3cb8c2a6199 100644
--- a/skimage/draw/tests/test_draw.py
+++ b/skimage/draw/tests/test_draw.py
@@ -325,6 +325,33 @@ def test_ellipse():
assert_array_equal(img, img_)
+def test_ellipse_with_shape():
+ img = np.zeros((15, 15), 'uint8')
+
+ rr, cc = ellipse(7, 7, 3, 10, shape=img.shape)
+ img[rr, cc] = 1
+
+ img_ = np.array(
+ [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
+ [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
+ [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
+ [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
+ [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
+ [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]
+ )
+
+ assert_array_equal(img, img_)
+
+
def test_ellipse_perimeter_dot_zeroangle():
# dot, angle == 0
img = np.zeros((30, 15), 'uint8')
|
voxel51__fiftyone-1652 | [BUG] `ImportError: cannot import name 'soft_unicode' from 'markupsafe'`
When `Jinja2<3` is installed a user will encounter this error.
```py
>>> import fiftyone as fo
Uncaught exception
Traceback (most recent call last):
File "/home/user/.local/lib/python3.8/site-packages/fiftyone/service/main.py", line 43, in <module>
from fiftyone.core.service import Service
File "/home/user/.local/lib/python3.8/site-packages/fiftyone/__init__.py", line 25, in <module>
from fiftyone.__public__ import *
File "/home/user/.local/lib/python3.8/site-packages/fiftyone/__public__.py", line 172, in <module>
from .core.session import (
File "/home/user/.local/lib/python3.8/site-packages/fiftyone/core/session.py", line 16, in <module>
from jinja2 import Template
File "/home/user/.local/lib/python3.8/site-packages/jinja2/__init__.py", line 12, in <module>
from .environment import Environment
File "/home/user/.local/lib/python3.8/site-packages/jinja2/environment.py", line 25, in <module>
from .defaults import BLOCK_END_STRING
File "/home/user/.local/lib/python3.8/site-packages/jinja2/defaults.py", line 3, in <module>
from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401
File "/home/user/.local/lib/python3.8/site-packages/jinja2/filters.py", line 13, in <module>
from markupsafe import soft_unicode
ImportError: cannot import name 'soft_unicode' from 'markupsafe' (/home/user/.local/lib/python3.8/site-packages/markupsafe/__init__.py)
```
| [
{
"content": "#!/usr/bin/env python\n\"\"\"\nInstalls FiftyOne.\n\n| Copyright 2017-2022, Voxel51, Inc.\n| `voxel51.com <https://voxel51.com/>`_\n|\n\"\"\"\nimport os\nfrom pkg_resources import DistributionNotFound, get_distribution\nimport re\nfrom setuptools import setup, find_packages\n\n\nVERSION = \"0.15.0... | [
{
"content": "#!/usr/bin/env python\n\"\"\"\nInstalls FiftyOne.\n\n| Copyright 2017-2022, Voxel51, Inc.\n| `voxel51.com <https://voxel51.com/>`_\n|\n\"\"\"\nimport os\nfrom pkg_resources import DistributionNotFound, get_distribution\nimport re\nfrom setuptools import setup, find_packages\n\n\nVERSION = \"0.15.0... | diff --git a/requirements/common.txt b/requirements/common.txt
index b21b0cc4e47..596d5b1b5ec 100644
--- a/requirements/common.txt
+++ b/requirements/common.txt
@@ -5,7 +5,7 @@ Deprecated==1.2.11
eventlet==0.31.0
Flask==1.1.2
httpx==0.7.7
-Jinja2==2.11.3
+Jinja2==3.0.3
kaleido==0.2.1
matplotlib==3.2.1
mongoengine==0.20.0
diff --git a/requirements/docs.txt b/requirements/docs.txt
index 99beeb1acf0..9d54669015c 100644
--- a/requirements/docs.txt
+++ b/requirements/docs.txt
@@ -2,7 +2,7 @@ autodocsumm==0.2.7
docutils==0.16
ipykernel==5.3.0
jupyter-client==6.1.3
-nbsphinx==0.8.5
+nbsphinx==0.8.8
sphinx-tabs==1.2.1
Sphinx==2.4.4
sphinxcontrib-napoleon==0.7
diff --git a/setup.py b/setup.py
index e50d01ddd21..4a896778029 100644
--- a/setup.py
+++ b/setup.py
@@ -36,7 +36,7 @@ def get_version():
"Deprecated",
"eventlet",
"future",
- "Jinja2",
+ "Jinja2>=3",
"kaleido",
"matplotlib",
"mongoengine==0.20.0",
|
aio-libs__aiohttp-1888 | In unit tests, Application comparisons can report false positive
Comparison between Application is performed at the MutableMapping level. MutableMapping says that, like dict objects, if all keys and matching values are the same 2 instances, then they are equals. This means that `web.Application() == web.Application()` will return True.
See:
```python
>>> a = aiohttp.web.Application()
>>> b = aiohttp.web.Application()
>>> a == b
True
>>> a["foo"] = "bar"
>>> a == b
False
>>> b["foo"] = "bar"
>>> a == b
True
```
I think those few unit tests are assuming a different behaviour:
* test_subapp_middlewares
* test_subapp_on_response_prepare
* test_subapp_on_startup
* test_subapp_on_shutdown
* test_subapp_on_cleanup
A change has been submitted for `test_subapp_middlewares` in #1854 to fix that. While the solution may or may not be accepted as is, all tests should be fixed.
Also, maybe an additional `test_application_equal` should be implemented, to be ensure expected behavior. Unless `web.Application.__eq__` special method gets implemented to change current behaviour, it should look like something like that:
```python
def test_application_equal():
app1 = web.Application()
app2 = web.Application()
assert app1 == app2
app1["foo"] = "bar"
assert app1 != app2
```
| [
{
"content": "import asyncio\nimport os\nimport socket\nimport stat\nimport sys\nimport warnings\nfrom argparse import ArgumentParser\nfrom collections import Iterable, MutableMapping\nfrom importlib import import_module\n\nfrom yarl import URL\n\nfrom . import (hdrs, web_exceptions, web_fileresponse, web_middl... | [
{
"content": "import asyncio\nimport os\nimport socket\nimport stat\nimport sys\nimport warnings\nfrom argparse import ArgumentParser\nfrom collections import Iterable, MutableMapping\nfrom importlib import import_module\n\nfrom yarl import URL\n\nfrom . import (hdrs, web_exceptions, web_fileresponse, web_middl... | diff --git a/CHANGES.rst b/CHANGES.rst
index 9330a3c7287..9f89a9ee3eb 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -43,6 +43,8 @@ Changes
- Fix sub-application middlewares resolution order #1853
+- Fix applications comparison #1866
+
2.0.7 (2017-04-12)
------------------
diff --git a/aiohttp/web.py b/aiohttp/web.py
index f117fbc4cea..e1f3d115efd 100644
--- a/aiohttp/web.py
+++ b/aiohttp/web.py
@@ -81,6 +81,9 @@ def __init__(self, *,
# MutableMapping API
+ def __eq__(self, other):
+ return self is other
+
def __getitem__(self, key):
return self._state[key]
diff --git a/tests/test_web_application.py b/tests/test_web_application.py
index 5b65ac7e2ad..0276147f999 100644
--- a/tests/test_web_application.py
+++ b/tests/test_web_application.py
@@ -219,3 +219,11 @@ def test_secure_proxy_ssl_header_init(loop):
assert app._secure_proxy_ssl_header is hdr
app.make_handler(loop=loop)
assert app._secure_proxy_ssl_header is hdr
+
+
+def test_equality():
+ app1 = web.Application()
+ app2 = web.Application()
+
+ assert app1 == app1
+ assert app1 != app2
|
Mailu__Mailu-2049 | Fetchmail: /var/lib/fetchmail needs persistence
According [fetchmail documentation](https://www.fetchmail.info/fetchmail-man.html#12), an `.idfile` is used to keep track of previously downloaded messages. Shouldn't that file persistent over container restarts?
I'm not a Fetchmail user, perhaps somebody can shine a light on how this currently works?
cc: @Nebukadneza, @hoellen, @kaiyou
| [
{
"content": "#!/usr/bin/python3\n\nimport time\nimport os\nimport tempfile\nimport shlex\nimport subprocess\nimport re\nimport requests\nimport sys\nimport traceback\n\n\nFETCHMAIL = \"\"\"\nfetchmail -N \\\n --sslcertck --sslcertpath /etc/ssl/certs \\\n -f {}\n\"\"\"\n\n\nRC_LINE = \"\"\"\npoll \"{host}... | [
{
"content": "#!/usr/bin/python3\n\nimport time\nimport os\nimport tempfile\nimport shlex\nimport subprocess\nimport re\nimport requests\nimport sys\nimport traceback\n\n\nFETCHMAIL = \"\"\"\nfetchmail -N \\\n --idfile /data/fetchids --uidl \\\n --sslcertck --sslcertpath /etc/ssl/certs \\\n -f {}\n\"\"... | diff --git a/optional/fetchmail/Dockerfile b/optional/fetchmail/Dockerfile
index 995ec48f9..068a5dcec 100644
--- a/optional/fetchmail/Dockerfile
+++ b/optional/fetchmail/Dockerfile
@@ -12,8 +12,8 @@ RUN apk add --no-cache \
RUN apk add --no-cache fetchmail ca-certificates openssl \
&& pip3 install requests
-COPY fetchmail.py /fetchmail.py
+RUN mkdir -p /data
-USER fetchmail
+COPY fetchmail.py /fetchmail.py
-CMD ["/fetchmail.py"]
+CMD ["/fetchmail.py"]
\ No newline at end of file
diff --git a/optional/fetchmail/fetchmail.py b/optional/fetchmail/fetchmail.py
index 4be3c2bdc..5459de59c 100755
--- a/optional/fetchmail/fetchmail.py
+++ b/optional/fetchmail/fetchmail.py
@@ -13,6 +13,7 @@
FETCHMAIL = """
fetchmail -N \
+ --idfile /data/fetchids --uidl \
--sslcertck --sslcertpath /etc/ssl/certs \
-f {}
"""
diff --git a/setup/flavors/compose/docker-compose.yml b/setup/flavors/compose/docker-compose.yml
index 2675a2ab2..18a881b8b 100644
--- a/setup/flavors/compose/docker-compose.yml
+++ b/setup/flavors/compose/docker-compose.yml
@@ -129,6 +129,8 @@ services:
image: ${DOCKER_ORG:-mailu}/${DOCKER_PREFIX:-}fetchmail:${MAILU_VERSION:-{{ version }}}
restart: always
env_file: {{ env }}
+ volumes:
+ - "{{ root }}/data/fetchmail:/data"
{% if resolver_enabled %}
depends_on:
- resolver
diff --git a/setup/flavors/stack/docker-compose.yml b/setup/flavors/stack/docker-compose.yml
index 24afa9f33..0c744d7ec 100644
--- a/setup/flavors/stack/docker-compose.yml
+++ b/setup/flavors/stack/docker-compose.yml
@@ -110,7 +110,7 @@ services:
image: ${DOCKER_ORG:-mailu}/${DOCKER_PREFIX:-}fetchmail:${MAILU_VERSION:-{{ version }}}
env_file: {{ env }}
volumes:
- - "{{ root }}/data:/data"
+ - "{{ root }}/data/fetchmail:/data"
deploy:
replicas: 1
healthcheck:
diff --git a/towncrier/newsfragments/1223.bugfix b/towncrier/newsfragments/1223.bugfix
new file mode 100644
index 000000000..3c23d1a4d
--- /dev/null
+++ b/towncrier/newsfragments/1223.bugfix
@@ -0,0 +1,4 @@
+Fixed fetchmail losing track of fetched emails upon container recreation.
+The relevant fetchmail files are now retained in the /data folder (in the fetchmail image).
+See the docker-compose.yml file for the relevant volume mapping.
+If you already had your own mapping, you must double check the volume mapping and take action.
|
rotki__rotki-1873 | SOL token is Solana, not Sola
## Problem Definition
The SOL token on the exchanges is reported with the correct value but the wrong name. Its Solana, not Sola.
| [
{
"content": "from dataclasses import dataclass, field\nfrom functools import total_ordering\nfrom typing import Any, Optional, Type, TypeVar\n\nfrom rotkehlchen.assets.resolver import AssetResolver\nfrom rotkehlchen.errors import DeserializationError, UnknownAsset, UnsupportedAsset\nfrom rotkehlchen.typing imp... | [
{
"content": "from dataclasses import dataclass, field\nfrom functools import total_ordering\nfrom typing import Any, Optional, Type, TypeVar\n\nfrom rotkehlchen.assets.resolver import AssetResolver\nfrom rotkehlchen.errors import DeserializationError, UnknownAsset, UnsupportedAsset\nfrom rotkehlchen.typing imp... | diff --git a/docs/changelog.rst b/docs/changelog.rst
index 1b3713bb6a..0542817bbb 100755
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -2,6 +2,7 @@
Changelog
=========
+* :bug:`1868` Binance SOL token is now properly mapped to Solana.
* :bug:`1849` Binance queries should no longer randomly fail with invalid signature.
* :bug:`1846` AMPL token balance should no longer be double counted.
diff --git a/rotkehlchen/assets/asset.py b/rotkehlchen/assets/asset.py
index 7f868ae1ae..58c37e0b83 100644
--- a/rotkehlchen/assets/asset.py
+++ b/rotkehlchen/assets/asset.py
@@ -141,6 +141,8 @@
'LUNA-2': 'LUNA',
# YOYOW is known as YOYO in Binance
'YOYOW': 'YOYO',
+ # Solana is SOL-2 in rotki
+ 'SOL-2': 'SOL',
}
diff --git a/rotkehlchen/tests/api/test_aave.py b/rotkehlchen/tests/api/test_aave.py
index 5f0aa485e4..98fe22ba34 100644
--- a/rotkehlchen/tests/api/test_aave.py
+++ b/rotkehlchen/tests/api/test_aave.py
@@ -206,7 +206,7 @@ def _query_borrowing_aave_history_test(setup: BalancesTestSetup, server: APIServ
total_lost = result[AAVE_TEST_ACC_3]['total_lost']
total_earned_liquidations = result[AAVE_TEST_ACC_3]['total_earned_liquidations']
- assert len(total_earned_interest) == 1
+ assert len(total_earned_interest) >= 1
assert len(total_earned_interest['aWBTC']) == 2
assert FVal(total_earned_interest['aWBTC']['amount']) >= FVal('0.00000833')
assert FVal(total_earned_interest['aWBTC']['usd_value']) >= ZERO
diff --git a/rotkehlchen/tests/api/test_balances.py b/rotkehlchen/tests/api/test_balances.py
index dc398bfdfb..7b714ebb7e 100644
--- a/rotkehlchen/tests/api/test_balances.py
+++ b/rotkehlchen/tests/api/test_balances.py
@@ -13,6 +13,7 @@
from rotkehlchen.constants.misc import ZERO
from rotkehlchen.fval import FVal
from rotkehlchen.tests.utils.api import (
+ ASYNC_TASK_WAIT_TIMEOUT,
api_url_for,
assert_error_response,
assert_ok_async_response,
@@ -580,8 +581,9 @@ def test_balances_caching_mixup(
task_id_btc,
)
result_eth = wait_for_async_task_with_result(
- rotkehlchen_api_server,
- task_id_eth,
+ server=rotkehlchen_api_server,
+ task_id=task_id_eth,
+ timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
)
assert result_eth['per_account']['ETH'][ethereum_accounts[0]]['assets']['ETH']['amount'] == '1' # noqa: E501
assert result_eth['per_account']['ETH'][ethereum_accounts[0]]['assets']['RDN']['amount'] == '2' # noqa: E501
diff --git a/rotkehlchen/tests/api/test_blockchain.py b/rotkehlchen/tests/api/test_blockchain.py
index 9db8805804..0a66a26cc3 100644
--- a/rotkehlchen/tests/api/test_blockchain.py
+++ b/rotkehlchen/tests/api/test_blockchain.py
@@ -346,7 +346,7 @@ def _add_blockchain_accounts_test_start(
result = wait_for_async_task_with_result(
api_server,
task_id,
- timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
+ timeout=ASYNC_TASK_WAIT_TIMEOUT * 4,
)
else:
result = assert_proper_response_with_result(response)
|
ivy-llc__ivy-14488 | conj
| [
{
"content": "# global\nfrom typing import Any\nimport itertools\nimport string\nfrom builtins import slice as py_slice\n\n# local\nimport ivy\nfrom ivy.functional.frontends.jax.func_wrapper import to_ivy_arrays_and_back\n\n\n@to_ivy_arrays_and_back\ndef abs(x):\n return ivy.abs(x)\n\n\n@to_ivy_arrays_and_ba... | [
{
"content": "# global\nfrom typing import Any\nimport itertools\nimport string\nfrom builtins import slice as py_slice\n\n# local\nimport ivy\nfrom ivy.functional.frontends.jax.func_wrapper import to_ivy_arrays_and_back\n\n\n@to_ivy_arrays_and_back\ndef abs(x):\n return ivy.abs(x)\n\n\n@to_ivy_arrays_and_ba... | diff --git a/ivy/functional/frontends/jax/lax/operators.py b/ivy/functional/frontends/jax/lax/operators.py
index 0acc4a4ddaf2b..3709196dcbf10 100644
--- a/ivy/functional/frontends/jax/lax/operators.py
+++ b/ivy/functional/frontends/jax/lax/operators.py
@@ -620,3 +620,8 @@ def real(x):
@to_ivy_arrays_and_back
def nextafter(x1, x2):
return ivy.nextafter(x1, x2)
+
+
+@to_ivy_arrays_and_back
+def conj(x):
+ return ivy.conj(x)
diff --git a/ivy_tests/test_ivy/test_frontends/test_jax/test_jax_lax_operators.py b/ivy_tests/test_ivy/test_frontends/test_jax/test_jax_lax_operators.py
index 12ba1d34e59ea..78ffccd2561ec 100644
--- a/ivy_tests/test_ivy/test_frontends/test_jax/test_jax_lax_operators.py
+++ b/ivy_tests/test_ivy/test_frontends/test_jax/test_jax_lax_operators.py
@@ -2553,3 +2553,29 @@ def test_jax_lax_nextafter(
x1=x[0],
x2=x[0],
)
+
+
+# conj
+@handle_frontend_test(
+ fn_tree="jax.lax.conj",
+ dtype_and_x=helpers.dtype_and_values(
+ available_dtypes=["complex64"],
+ ),
+)
+def test_jax_lax_conj(
+ *,
+ dtype_and_x,
+ test_flags,
+ on_device,
+ fn_tree,
+ frontend,
+):
+ input_dtype, x = dtype_and_x
+ helpers.test_frontend_function(
+ input_dtypes=input_dtype,
+ test_flags=test_flags,
+ frontend=frontend,
+ fn_tree=fn_tree,
+ on_device=on_device,
+ x=x[0],
+ )
|
iterative__dvc-2282 | test: s3: use moto to test multipart objects
Currently, we are unable to use it because of [this bug](https://github.com/spulec/moto/issues/2154). When it is fixed, we should switch to it from using actual s3 for unit testing. Related to https://github.com/iterative/dvc/pull/1867
| [
{
"content": "from setuptools import setup, find_packages\nfrom setuptools.command.build_py import build_py as _build_py\nimport os\nimport sys\n\n\n# https://packaging.python.org/guides/single-sourcing-package-version/\npkg_dir = os.path.dirname(__file__)\n\n# This will define __version__ implicitly\nwith open... | [
{
"content": "from setuptools import setup, find_packages\nfrom setuptools.command.build_py import build_py as _build_py\nimport os\nimport sys\n\n\n# https://packaging.python.org/guides/single-sourcing-package-version/\npkg_dir = os.path.dirname(__file__)\n\n# This will define __version__ implicitly\nwith open... | diff --git a/scripts/ci/install.sh b/scripts/ci/install.sh
index 40a73a133d..dd69f6caee 100644
--- a/scripts/ci/install.sh
+++ b/scripts/ci/install.sh
@@ -17,6 +17,10 @@ function retry {
retry pip install --upgrade pip setuptools wheel
retry pip install .[all,tests]
+# NOTE: waiting for https://github.com/spulec/moto/issues/2172
+pip uninstall -y moto
+retry pip install git+https://github.com/efiop/moto.git@move-env-mocking
+
git config --global user.email "dvctester@example.com"
git config --global user.name "DVC Tester"
diff --git a/setup.py b/setup.py
index da01f7fb96..28d85847ab 100644
--- a/setup.py
+++ b/setup.py
@@ -102,6 +102,7 @@ def run(self):
"pydocstyle<4.0",
"jaraco.windows==3.9.2",
"mock-ssh-server>=0.5.0",
+ "moto",
]
if (sys.version_info) >= (3, 6):
diff --git a/tests/func/test_s3.py b/tests/func/test_s3.py
index 3bcb64ba3a..9cead965fe 100644
--- a/tests/func/test_s3.py
+++ b/tests/func/test_s3.py
@@ -1,8 +1,32 @@
import boto3
-import pytest
+
+from moto import mock_s3
+from functools import wraps
+import moto.s3.models as s3model
from dvc.remote.s3 import RemoteS3
-from tests.func.test_data_cloud import _should_test_aws, get_aws_url
+from tests.func.test_data_cloud import get_aws_url
+
+
+# from https://github.com/spulec/moto/blob/v1.3.5/tests/test_s3/test_s3.py#L40
+REDUCED_PART_SIZE = 256
+
+
+def reduced_min_part_size(f):
+ """ speed up tests by temporarily making the multipart minimum part size
+ small
+ """
+ orig_size = s3model.UPLOAD_PART_MIN_SIZE
+
+ @wraps(f)
+ def wrapped(*args, **kwargs):
+ try:
+ s3model.UPLOAD_PART_MIN_SIZE = REDUCED_PART_SIZE
+ return f(*args, **kwargs)
+ finally:
+ s3model.UPLOAD_PART_MIN_SIZE = orig_size
+
+ return wrapped
def _get_src_dst():
@@ -10,13 +34,12 @@ def _get_src_dst():
return base_info / "from", base_info / "to"
+@mock_s3
def test_copy_singlepart_preserve_etag():
from_info, to_info = _get_src_dst()
- if not _should_test_aws():
- pytest.skip()
-
s3 = boto3.client("s3")
+ s3.create_bucket(Bucket=from_info.bucket)
s3.put_object(Bucket=from_info.bucket, Key=from_info.path, Body="data")
RemoteS3._copy(s3, from_info, to_info, {})
@@ -32,8 +55,8 @@ def _upload_multipart(s3, Bucket, Key):
# NOTE: Generation parts of variable size. Part size should be at
# least 5MB:
# https://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
- part_size = 10 * 1024 * 1024 + 2 ** i
- body = str(i) * part_size
+ part_size = REDUCED_PART_SIZE + i
+ body = b"1" * part_size
part = s3.upload_part(
Bucket=Bucket,
Key=Key,
@@ -52,12 +75,12 @@ def _upload_multipart(s3, Bucket, Key):
)
+@mock_s3
+@reduced_min_part_size
def test_copy_multipart_preserve_etag():
from_info, to_info = _get_src_dst()
- if not _should_test_aws():
- pytest.skip()
-
s3 = boto3.client("s3")
+ s3.create_bucket(Bucket=from_info.bucket)
_upload_multipart(s3, from_info.bucket, from_info.path)
RemoteS3._copy(s3, from_info, to_info, {})
|
xonsh__xonsh-4511 | "Little Bobby Colors": $PROMPT evaluates {colors} *after* substitution from external input
## xonfig
<details>
```
+------------------+----------------------+
| xonsh | 0.9.27 |
| Git SHA | 71fe9014 |
| Commit Date | Jan 29 08:58:58 2021 |
| Python | 3.9.5 |
| PLY | 3.11 |
| have readline | True |
| prompt toolkit | 3.0.19 |
| shell type | prompt_toolkit |
| pygments | 2.9.0 |
| on posix | True |
| on linux | True |
| distro | ubuntu |
| on darwin | False |
| on windows | False |
| on cygwin | False |
| on msys2 | False |
| is superuser | False |
| default encoding | utf-8 |
| xonsh encoding | utf-8 |
| encoding errors | surrogateescape |
| on jupyter | False |
| jupyter kernel | None |
| xontrib 1 | apt_tabcomplete |
| xontrib 2 | direnv |
| xontrib 3 | kitty |
| xontrib 4 | prompt_ret_code |
+------------------+----------------------+
```
</details>
## Expected Behavior
When a prompt includes shell-external names (like working directory, running job name, etc), any "meta" command directives should be escaped, lest unusual names trigger surprising (and perhaps even harmful) behavior.
## Current Behavior
```
$ xonsh --no-rc # just to demo with the default prompt
... default xonsh message ...
egnor@ostrich ~ $ mkdir '{' # slightly odd directory name
egnor@ostrich ~ $ cd '{' # here we go!
{BOLD_GREEN}egnor@ostrich{BOLD_BLUE} ~/{{BOLD_INTENSE_YELLOW}{RESET} {BOLD_BLUE}
${RESET}
```
Suddenly the prompt is barfing, because the curly braces are no longer balanced, because the `{` directory name was substituted into the prompt. This is also fun:
```
egnor@ostrich ~ $ mkdir '{BACKGROUND_RED} ALERT'
egnor@ostrich ~ $ cd '{BACKGROUND_RED} ALERT'
egnor@ostrich ~/ ALERT $
```
...and "ALERT" gets a bright red background color in the prompt. As far as I know, nothing in curly braces will do anything particularly terrible (nothing takes any arguments) so I don't _think_ this is a security issue but it sure doesn't feel right.
## Steps to Reproduce
1. Have a prompt that shows the current directory (e.g. the default prompt)
2. Create a directory with `{` / `}` characters in it, perhaps even color tags like `{RED}`
3. Enter that directory
## VERY VERY HUMBLE editorializing 🙇
<sup>(Please take this commentary with a HUGE grain of salt :salt: because I am a super newcomer to xonsh and not a contributor (yet?), though I'd be happy to help improve this... I _love_ xonsh's overall structure and direction!)</sup>
<sup>This problem could be fixed by somehow "escaping" the output from `{cwd}` and the like, OR by doing color substitution before/while doing other expansions (rather than expanding `{cwd}` in one pass, then expanding colors like `{RED}` in a separate pass)...</sup>
<sup>BUT I don't love the little mini language used in `$PROMPT` and friends (`$RIGHT_PROMPT`, `$TITLE`, `$BOTTOM_TOOLBAR`, etc). It's conventional to have such a little mini language in shells but I think xonsh can do better _and_ be simpler. Fundamentally this is yet another little string interpolation mini template language, with the usual problems of escaping "text" vs "markup".</sup>
<sup>But since this is all Python we don't really need "markup" and its attendant escaping problems. $PROMPT could just be a function that returns the prompt to show. That function can then call whatever handy dandy utility functions it wants to (to get the cwd formatted various ways, hostname, git status, etc) and assemble it using ordinary Python string manipulation (f-strings or `string.Template` or just the `+` operator), no fuss, no muss, no weird little special curly-brackets-with-colons things to learn. Colors and similar text formatting could be supported with a `ColoredStr` class which could be constructed and concatenated (with other `ColoredStr` and/or regular `str`) and sliced much like `str`. Then everything would be clean and easy and Pythonic without curly braces flopping about.</sup>
<sup>(End humble editorializing!)</sup>
## For community
⬇️ **Please click the 👍 reaction instead of leaving a `+1` or 👍 comment**
| [
{
"content": "# -*- coding: utf-8 -*-\n\"\"\"CWD related prompt formatter\"\"\"\n\nimport os\nimport shutil\n\nimport xonsh.tools as xt\nimport xonsh.platform as xp\nfrom xonsh.built_ins import XSH\n\n\ndef _replace_home(x):\n if xp.ON_WINDOWS:\n home = XSH.env[\"HOMEDRIVE\"] + XSH.env[\"HOMEPATH\"][0... | [
{
"content": "# -*- coding: utf-8 -*-\n\"\"\"CWD related prompt formatter\"\"\"\n\nimport os\nimport shutil\n\nimport xonsh.tools as xt\nimport xonsh.platform as xp\nfrom xonsh.built_ins import XSH\n\n\ndef _replace_home(x):\n if xp.ON_WINDOWS:\n home = XSH.env[\"HOMEDRIVE\"] + XSH.env[\"HOMEPATH\"][0... | diff --git a/news/pwd-curly-escape.rst b/news/pwd-curly-escape.rst
new file mode 100644
index 0000000000..4a069b1eba
--- /dev/null
+++ b/news/pwd-curly-escape.rst
@@ -0,0 +1,23 @@
+**Added:**
+
+* <news item>
+
+**Changed:**
+
+* Curly braces { } in directory names are now escaped in the prompt
+
+**Deprecated:**
+
+* <news item>
+
+**Removed:**
+
+* <news item>
+
+**Fixed:**
+
+* <news item>
+
+**Security:**
+
+* <news item>
diff --git a/tests/prompt/test_cwd.py b/tests/prompt/test_cwd.py
new file mode 100644
index 0000000000..977a64b094
--- /dev/null
+++ b/tests/prompt/test_cwd.py
@@ -0,0 +1,16 @@
+from xonsh.prompt.cwd import _replace_home_cwd
+from xonsh.built_ins import XSH
+
+
+def test_cwd_escapes_curly_brackets_with_more_curly_brackets():
+ XSH.env["PWD"] = "{foo}"
+ assert _replace_home_cwd() == "{{foo}}"
+
+ XSH.env["PWD"] = "{{foo}}"
+ assert _replace_home_cwd() == "{{{{foo}}}}"
+
+ XSH.env["PWD"] = "{"
+ assert _replace_home_cwd() == "{{"
+
+ XSH.env["PWD"] = "}}"
+ assert _replace_home_cwd() == "}}}}"
diff --git a/xonsh/prompt/cwd.py b/xonsh/prompt/cwd.py
index 032b0e9755..32d1685bb1 100644
--- a/xonsh/prompt/cwd.py
+++ b/xonsh/prompt/cwd.py
@@ -27,7 +27,8 @@ def _replace_home(x):
def _replace_home_cwd():
- return _replace_home(XSH.env["PWD"])
+ pwd = XSH.env["PWD"].replace("{", "{{").replace("}", "}}")
+ return _replace_home(pwd)
def _collapsed_pwd():
|
doccano__doccano-1842 | Doccano is not importing any text data
Hello,
Doccano is not importing any text data. When importing the text data the following browser loading is going on:

The command line terminal is showing the following:-
```
<Starting server with port 8000.
WARNING:waitress.queue:Task queue depth is 1
WARNING:waitress.queue:Task queue depth is 2
Bad Request: /v1/auth/login/
WARNING:django.request:Bad Request: /v1/auth/login/
WARNING:waitress.queue:Task queue depth is 1
WARNING:waitress.queue:Task queue depth is 2
WARNING:waitress.queue:Task queue depth is 1
WARNING:waitress.queue:Task queue depth is 1
WARNING:waitress.queue:Task queue depth is 1>
```
Your Environment
---------
* Operating System: Windows 10
* Python Version Used: 3.10
* When you install doccano: Few days back
* How did you install doccano (Heroku button etc): Command Line
| [
{
"content": "import argparse\nimport multiprocessing\nimport os\nimport platform\nimport sys\nfrom pathlib import Path\n\nimport django\nfrom django.core import management\nfrom environs import Env\n\nfrom .config.celery import app\n\nenv = Env()\nDOCCANO_HOME = os.path.expanduser(os.environ.get(\"DOCCANO_HOME... | [
{
"content": "import argparse\nimport multiprocessing\nimport os\nimport platform\nimport sys\nfrom pathlib import Path\n\nimport django\nfrom django.core import management\nfrom environs import Env\n\nfrom .config.celery import app\n\nenv = Env()\nDOCCANO_HOME = os.path.expanduser(os.environ.get(\"DOCCANO_HOME... | diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 9872731980..04a7192419 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -9,35 +9,35 @@ jobs:
run:
working-directory: ./backend
steps:
- - uses: actions/checkout@v2
- - name: Set up Python 3.8
- uses: actions/setup-python@v2
- with:
- python-version: 3.8
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip
- pip install poetry
- poetry install
- - name: Run migrations
- run: |
- poetry run task wait_for_db
- poetry run task migrate
- - name: Lint with flake8
- run: |
- poetry run task flake8
- - name: Lint with isort
- run: |
- poetry run task isort
- - name: Black
- run: |
- poetry run task black
- - name: mypy
- run: |
- poetry run task mypy
- - name: Run tests
- run: |
- poetry run task test
+ - uses: actions/checkout@v2
+ - name: Set up Python 3.8
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.8
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install poetry
+ poetry install
+ - name: Run migrations
+ run: |
+ poetry run task wait_for_db
+ poetry run task migrate
+ - name: Lint with flake8
+ run: |
+ poetry run task flake8
+ - name: Lint with isort
+ run: |
+ poetry run task isort
+ - name: Black
+ run: |
+ poetry run task black
+ - name: mypy
+ run: |
+ poetry run task mypy
+ - name: Run tests
+ run: |
+ poetry run task test
frontend:
runs-on: ubuntu-latest
@@ -48,13 +48,15 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
- node-version: '16'
+ node-version: "16"
- name: Install Yarn
run: npm install -g yarn
- name: Install npm modules
run: yarn install
- name: Lint
run: yarn lint
+ - name: Prettier
+ run: yarn lint:prettier
docker-lint:
runs-on: ubuntu-latest
diff --git a/backend/cli.py b/backend/cli.py
index 51c0f6c9e7..cdbfc7c67a 100644
--- a/backend/cli.py
+++ b/backend/cli.py
@@ -66,7 +66,7 @@ def run_on_windows(args):
from config.wsgi import application
- serve(application, port=args.port)
+ serve(application, port=args.port, threads=args.workers)
def command_db_init(args):
diff --git a/frontend/.eslintrc.js b/frontend/.eslintrc.js
index 96da87adb2..a1e0ca0d14 100644
--- a/frontend/.eslintrc.js
+++ b/frontend/.eslintrc.js
@@ -4,27 +4,23 @@ module.exports = {
browser: true,
node: true
},
- extends: [
- '@nuxtjs',
- 'plugin:nuxt/recommended',
- '@nuxtjs/eslint-config-typescript',
- 'prettier'
- ],
+ extends: ['@nuxtjs/eslint-config-typescript', 'plugin:nuxt/recommended', 'prettier'],
rules: {
- 'no-console': 'off',
- 'no-restricted-syntax': [
+ 'no-console': process.env.NODE_ENV === 'production' ? 'warn' : 'off',
+ 'no-debugger': process.env.NODE_ENV === 'production' ? 'warn' : 'off',
+ 'no-useless-constructor': 'off',
+ camelcase: 'off',
+ 'max-len': [
'error',
+ 100,
+ 2,
{
- selector: "CallExpression[callee.object.name='console'][callee.property.name!=/^(log|warn|error|info|trace)$/]",
- message: 'Unexpected property on console object was called'
+ ignoreUrls: true,
+ ignoreComments: false,
+ ignoreRegExpLiterals: true,
+ ignoreStrings: true,
+ ignoreTemplateLiterals: true
}
- ],
- // 'vue/valid-template-root': 'off',
- // 'space-before-function-paren': ['error', 'never'],
- 'no-useless-constructor': 'off',
- // '@typescript-eslint/no-useless-constructor': 'off',
- // 'no-unused-vars': 'off',
- // '@typescript-eslint/no-unused-vars': 'off',
- 'camelcase': 'off'
+ ]
}
}
diff --git a/frontend/.prettierignore b/frontend/.prettierignore
new file mode 100644
index 0000000000..8d9ce052a1
--- /dev/null
+++ b/frontend/.prettierignore
@@ -0,0 +1,8 @@
+.nuxt/
+assets/
+coverage/
+dist/
+node_modules/
+static/
+*.html
+*.md
diff --git a/frontend/.prettierrc b/frontend/.prettierrc
new file mode 100644
index 0000000000..a65b64adeb
--- /dev/null
+++ b/frontend/.prettierrc
@@ -0,0 +1,6 @@
+{
+ "printWidth": 100,
+ "semi": false,
+ "singleQuote": true,
+ "trailingComma": "none"
+}
diff --git a/frontend/components/auth/FormLogin.vue b/frontend/components/auth/FormLogin.vue
index c561926c68..0b5c68b3e4 100644
--- a/frontend/components/auth/FormLogin.vue
+++ b/frontend/components/auth/FormLogin.vue
@@ -7,12 +7,7 @@
>
<template #content>
<v-form v-model="valid">
- <v-alert
- v-show="showError"
- v-model="showError"
- type="error"
- dismissible
- >
+ <v-alert v-show="showError" v-model="showError" type="error" dismissible>
{{ $t('errors.invalidUserOrPass') }}
</v-alert>
<v-text-field
diff --git a/frontend/components/comment/Comment.vue b/frontend/components/comment/Comment.vue
index 7dd7b1780c..66d9441697 100644
--- a/frontend/components/comment/Comment.vue
+++ b/frontend/components/comment/Comment.vue
@@ -11,41 +11,26 @@
<v-list-item-content>
<v-list-item-title>{{ comment.username }}</v-list-item-title>
<v-list-item-subtitle>
- {{ comment.createdAt | dateParse('YYYY-MM-DDTHH:mm:ss') | dateFormat('DD/MM/YYYY HH:mm') }}
+ {{
+ comment.createdAt | dateParse('YYYY-MM-DDTHH:mm:ss') | dateFormat('DD/MM/YYYY HH:mm')
+ }}
</v-list-item-subtitle>
</v-list-item-content>
- <v-row
- align="center"
- justify="end"
- >
- <v-menu
- v-if="comment.user == userId"
- bottom
- left
- >
+ <v-row align="center" justify="end">
+ <v-menu v-if="comment.user == userId" bottom left>
<template #activator="{ on, attrs }">
- <v-btn
- icon
- v-bind="attrs"
- v-on="on"
- >
+ <v-btn icon v-bind="attrs" v-on="on">
<v-icon>{{ mdiDotsVertical }}</v-icon>
</v-btn>
</template>
<v-list>
<v-list-item>
- <v-list-item-title
- @click="showEdit=true"
- >
- Edit
- </v-list-item-title>
+ <v-list-item-title @click="showEdit = true"> Edit </v-list-item-title>
</v-list-item>
<v-list-item>
- <v-list-item-title
- @click="$emit('delete-comment', comment)"
- >
+ <v-list-item-title @click="$emit('delete-comment', comment)">
Delete
</v-list-item-title>
</v-list-item>
@@ -59,27 +44,12 @@
<span v-if="!showEdit">
{{ comment.text }}
</span>
- <v-form
- v-else
- v-model="valid"
- >
+ <v-form v-else v-model="valid">
<v-row>
- <v-textarea
- v-model="editText"
- auto-grow
- rows="1"
- solo
- :rules="commentRules"
- />
+ <v-textarea v-model="editText" auto-grow rows="1" solo :rules="commentRules" />
</v-row>
<v-row justify="end">
- <v-btn
- text
- class="text-capitalize"
- @click="cancel"
- >
- Cancel
- </v-btn>
+ <v-btn text class="text-capitalize" @click="cancel"> Cancel </v-btn>
<v-btn
:disabled="!valid"
color="primary"
@@ -119,9 +89,7 @@ export default Vue.extend({
return {
showEdit: false,
editText: this.comment.text,
- commentRules: [
- (v: string) => !!v.trim() || 'Comment is required'
- ],
+ commentRules: [(v: string) => !!v.trim() || 'Comment is required'],
valid: false,
mdiAccountCircle,
mdiDotsVertical
@@ -131,10 +99,10 @@ export default Vue.extend({
methods: {
updateComment(newText: string) {
this.showEdit = false
- const comment = {...this.comment, text:newText }
+ const comment = { ...this.comment, text: newText }
this.$emit('update-comment', comment)
},
-
+
cancel() {
this.showEdit = false
this.editText = this.comment.text
diff --git a/frontend/components/comment/CommentList.vue b/frontend/components/comment/CommentList.vue
index a847d94165..92e3f7b2d8 100644
--- a/frontend/components/comment/CommentList.vue
+++ b/frontend/components/comment/CommentList.vue
@@ -10,7 +10,7 @@
:loading-text="$t('generic.loading')"
:no-data-text="$t('vuetify.noDataAvailable')"
:footer-props="{
- 'showFirstLastPage': true,
+ showFirstLastPage: true,
'items-per-page-options': [10, 50, 100],
'items-per-page-text': $t('vuetify.itemsPerPageText'),
'page-text': $t('dataset.pageText')
@@ -20,7 +20,9 @@
@input="$emit('input', $event)"
>
<template #[`item.createdAt`]="{ item }">
- <span>{{ item.createdAt | dateParse('YYYY-MM-DDTHH:mm:ss') | dateFormat('DD/MM/YYYY HH:mm') }}</span>
+ <span>{{
+ item.createdAt | dateParse('YYYY-MM-DDTHH:mm:ss') | dateFormat('DD/MM/YYYY HH:mm')
+ }}</span>
</template>
<template #top>
<v-text-field
@@ -88,7 +90,7 @@ export default Vue.extend({
{ text: this.$t('dataset.text'), value: 'text' },
{ text: this.$t('user.username'), value: 'username' },
{ text: this.$t('comments.created_at'), value: 'createdAt' },
- { text: this.$t('dataset.action'), value: 'action' },
+ { text: this.$t('dataset.action'), value: 'action' }
],
mdiMagnify
}
@@ -117,7 +119,7 @@ export default Vue.extend({
})
this.options.page = 1
}
- },
+ }
// methods: {
// toLabeling(item: CommentReadDTO) {
diff --git a/frontend/components/comment/FormCreate.vue b/frontend/components/comment/FormCreate.vue
index b15863ee26..96d6f87cdc 100644
--- a/frontend/components/comment/FormCreate.vue
+++ b/frontend/components/comment/FormCreate.vue
@@ -28,9 +28,7 @@ import Vue from 'vue'
export default Vue.extend({
data() {
return {
- commentRules: [
- (v: string) => !!v.trim() || 'Comment is required'
- ],
+ commentRules: [(v: string) => !!v.trim() || 'Comment is required'],
message: '',
valid: false
}
diff --git a/frontend/components/configAutoLabeling/ConfigCreationForm.vue b/frontend/components/configAutoLabeling/ConfigCreationForm.vue
index af594dd021..7ae36e9b53 100644
--- a/frontend/components/configAutoLabeling/ConfigCreationForm.vue
+++ b/frontend/components/configAutoLabeling/ConfigCreationForm.vue
@@ -1,15 +1,10 @@
<template>
- <v-stepper
- v-model="step.count"
- >
+ <v-stepper v-model="step.count">
<v-overlay :value="isLoading">
<v-progress-circular indeterminate size="64" />
</v-overlay>
<config-header :step="step.count" />
- <config-template-name
- v-model="fields"
- @next="step.next()"
- />
+ <config-template-name v-model="fields" @next="step.next()" />
<config-parameters
v-if="fields.modelAttrs !== undefined"
v-model="fields.modelAttrs"
@@ -86,37 +81,44 @@ export default Vue.extend({
watch: {
'fields.modelName'() {
- this.passTesting = {parameter: false, template: false, mapping: false}
- },
- 'fields.modelAttrs': {
- handler() {
- this.passTesting = {parameter: false, template: false, mapping: false}
- },
- deep: true
- },
+ this.passTesting = { parameter: false, template: false, mapping: false }
+ },
+ 'fields.modelAttrs': {
+ handler() {
+ this.passTesting = {
+ parameter: false,
+ template: false,
+ mapping: false
+ }
+ },
+ deep: true
+ },
'fields.template'() {
- this.passTesting = {parameter: true, template: false, mapping: false}
+ this.passTesting = { parameter: true, template: false, mapping: false }
},
'fields.labelMapping': {
- handler() {
- this.passTesting = {parameter: true, template: true, mapping: false}
- },
- deep: true
- },
+ handler() {
+ this.passTesting = { parameter: true, template: true, mapping: false }
+ },
+ deep: true
+ }
},
methods: {
- testConfig(promise: Promise<any>, key: 'parameter'|'template'|'mapping') {
+ testConfig(promise: Promise<any>, key: 'parameter' | 'template' | 'mapping') {
this.isLoading = true
- promise.then((value) => {
- this.response[key] = value
- this.passTesting[key] = true
- this.errors = []
- }).catch((error) => {
- this.errors = [error.message]
- }).finally(() => {
- this.isLoading = false
- })
+ promise
+ .then((value) => {
+ this.response[key] = value
+ this.passTesting[key] = true
+ this.errors = []
+ })
+ .catch((error) => {
+ this.errors = [error.message]
+ })
+ .finally(() => {
+ this.isLoading = false
+ })
},
testParameters(text: string) {
const projectId = this.$route.params.id
@@ -140,7 +142,8 @@ export default Vue.extend({
const projectId = this.$route.params.id
const item = ConfigItem.parseFromUI(this.fields)
this.isLoading = true
- this.$services.config.save(projectId, item)
+ this.$services.config
+ .save(projectId, item)
.then(() => {
this.step.first()
this.$emit('onCreate')
diff --git a/frontend/components/configAutoLabeling/ConfigList.vue b/frontend/components/configAutoLabeling/ConfigList.vue
index 0136cdd18c..b3940d7690 100644
--- a/frontend/components/configAutoLabeling/ConfigList.vue
+++ b/frontend/components/configAutoLabeling/ConfigList.vue
@@ -11,23 +11,23 @@
>
<template #top>
<div class="ma-4">
- <v-btn
- class="primary text-capitalize"
- @click="dialogCreate=true"
- >
+ <v-btn class="primary text-capitalize" @click="dialogCreate = true">
{{ $t('generic.create') }}
</v-btn>
<v-btn
class="text-capitalize ms-2"
:disabled="!isDeletable()"
outlined
- @click="dialogDelete=true"
+ @click="dialogDelete = true"
>
{{ $t('generic.delete') }}
</v-btn>
<v-dialog v-model="dialogCreate">
<config-creation-form
- @onCreate="onCreate();dialogCreate=false"
+ @onCreate="
+ onCreate()
+ dialogCreate = false
+ "
/>
</v-dialog>
<v-dialog v-model="dialogDelete">
@@ -36,8 +36,11 @@
title="Delete Config"
message="Are you sure you want to delete these configs?"
item-key="modelName"
- @ok="remove();dialogDelete=false"
- @cancel="dialogDelete=false"
+ @ok="
+ remove()
+ dialogDelete = false
+ "
+ @cancel="dialogDelete = false"
/>
</v-dialog>
</div>
diff --git a/frontend/components/configAutoLabeling/form/ConfigHeader.vue b/frontend/components/configAutoLabeling/form/ConfigHeader.vue
index 4d909550bd..6ccf117310 100644
--- a/frontend/components/configAutoLabeling/form/ConfigHeader.vue
+++ b/frontend/components/configAutoLabeling/form/ConfigHeader.vue
@@ -1,32 +1,12 @@
<template>
<v-stepper-header>
- <v-stepper-step
- :complete="step > 1"
- step="1"
- >
- Select a template
- </v-stepper-step>
+ <v-stepper-step :complete="step > 1" step="1"> Select a template </v-stepper-step>
<v-divider />
- <v-stepper-step
- :complete="step > 2"
- step="2"
- >
- Set parameters
- </v-stepper-step>
+ <v-stepper-step :complete="step > 2" step="2"> Set parameters </v-stepper-step>
<v-divider />
- <v-stepper-step
- :complete="step > 3"
- step="3"
- >
- Set a template
- </v-stepper-step>
+ <v-stepper-step :complete="step > 3" step="3"> Set a template </v-stepper-step>
<v-divider />
- <v-stepper-step
- :complete="step > 4"
- step="4"
- >
- Set mappings
- </v-stepper-step>
+ <v-stepper-step :complete="step > 4" step="4"> Set mappings </v-stepper-step>
</v-stepper-header>
</template>
diff --git a/frontend/components/configAutoLabeling/form/ConfigLabelMapping.vue b/frontend/components/configAutoLabeling/form/ConfigLabelMapping.vue
index 3950256e11..f18ebcb170 100644
--- a/frontend/components/configAutoLabeling/form/ConfigLabelMapping.vue
+++ b/frontend/components/configAutoLabeling/form/ConfigLabelMapping.vue
@@ -4,65 +4,33 @@
<v-card-text class="pa-0">
<h4 class="text-h6">Configure label mappings</h4>
<p class="font-weight-regular body-1">
- Once you fetch the API response, you need to convert the label in the response into the one which you defined at the label page.
+ Once you fetch the API response, you need to convert the label in the response into the
+ one which you defined at the label page.
</p>
- <h4 class="text-h6">
- Response
- </h4>
- <v-sheet
- :dark="!$vuetify.theme.dark"
- :light="$vuetify.theme.dark"
- class="mb-5 pa-5"
- >
+ <h4 class="text-h6">Response</h4>
+ <v-sheet :dark="!$vuetify.theme.dark" :light="$vuetify.theme.dark" class="mb-5 pa-5">
<pre>{{ JSON.stringify(response, null, 4) }}</pre>
</v-sheet>
<label-mapping v-model="mapping" />
- <v-alert
- v-for="(error, index) in errorMessages"
- :key="index"
- prominent
- type="error"
- >
+ <v-alert v-for="(error, index) in errorMessages" :key="index" prominent type="error">
<v-row align="center">
<v-col class="grow">
{{ error }}
</v-col>
</v-row>
</v-alert>
- <h4 class="text-h6">
- Result
- </h4>
- <v-sheet
- :dark="!$vuetify.theme.dark"
- :light="$vuetify.theme.dark"
- class="mb-5 pa-5"
- >
+ <h4 class="text-h6">Result</h4>
+ <v-sheet :dark="!$vuetify.theme.dark" :light="$vuetify.theme.dark" class="mb-5 pa-5">
<pre>{{ JSON.stringify(result, null, 4) }}</pre>
</v-sheet>
</v-card-text>
<v-card-actions class="pa-0">
<v-spacer />
- <v-btn
- text
- class="text-capitalize"
- @click="$emit('prev')"
- >
- Prev
- </v-btn>
- <v-btn
- v-show="!isPassed"
- color="primary"
- class="text-capitalize"
- @click="$emit('onTest')"
- >
+ <v-btn text class="text-capitalize" @click="$emit('prev')"> Prev </v-btn>
+ <v-btn v-show="!isPassed" color="primary" class="text-capitalize" @click="$emit('onTest')">
Test
</v-btn>
- <v-btn
- v-show="isPassed"
- color="success"
- class="text-capitalize"
- @click="$emit('next')"
- >
+ <v-btn v-show="isPassed" color="success" class="text-capitalize" @click="$emit('next')">
Finish
</v-btn>
</v-card-actions>
diff --git a/frontend/components/configAutoLabeling/form/ConfigParameters.vue b/frontend/components/configAutoLabeling/form/ConfigParameters.vue
index e779327b85..e216c124b9 100644
--- a/frontend/components/configAutoLabeling/form/ConfigParameters.vue
+++ b/frontend/components/configAutoLabeling/form/ConfigParameters.vue
@@ -4,9 +4,7 @@
<v-card-text class="pa-0">
<v-form>
<h4 class="text-h6">Set parameters</h4>
- <p class="font-weight-regular body-1">
- You can set parameters to fetch API response.
- </p>
+ <p class="font-weight-regular body-1">You can set parameters to fetch API response.</p>
<template v-for="item in value">
<v-text-field
v-if="item.type === 'textField'"
@@ -33,7 +31,8 @@
<h4 class="text-h6">Test the parameters</h4>
<p class="font-weight-regular body-1">
Before proceeding, you need to test the parameters whether they can fetch API response.
- Please input sample text and press the <strong>Test</strong> button.
+ Please input sample text and press the
+ <strong>Test</strong> button.
</p>
<v-text-field
v-if="project.isTextProject"
@@ -41,43 +40,23 @@
outlined
label="Sample Text"
/>
- <file-field
- v-else
- v-model="payload"
- />
- <v-alert
- v-for="(error, index) in errorMessages"
- :key="index"
- prominent
- type="error"
- >
+ <file-field v-else v-model="payload" />
+ <v-alert v-for="(error, index) in errorMessages" :key="index" prominent type="error">
<v-row align="center">
<v-col class="grow">
{{ error }}
</v-col>
</v-row>
</v-alert>
- <h4 class="text-h6">
- Response
- </h4>
- <v-sheet
- :dark="!$vuetify.theme.dark"
- :light="$vuetify.theme.dark"
- class="mb-5 pa-5"
- >
+ <h4 class="text-h6">Response</h4>
+ <v-sheet :dark="!$vuetify.theme.dark" :light="$vuetify.theme.dark" class="mb-5 pa-5">
<pre>{{ JSON.stringify(response, null, 4) }}</pre>
</v-sheet>
</v-form>
</v-card-text>
<v-card-actions class="pa-0">
<v-spacer />
- <v-btn
- text
- class="text-capitalize"
- @click="$emit('prev')"
- >
- Prev
- </v-btn>
+ <v-btn text class="text-capitalize" @click="$emit('prev')"> Prev </v-btn>
<v-btn
v-show="!isPassed"
color="primary"
@@ -86,12 +65,7 @@
>
Test
</v-btn>
- <v-btn
- v-show="isPassed"
- color="primary"
- class="text-capitalize"
- @click="$emit('next')"
- >
+ <v-btn v-show="isPassed" color="primary" class="text-capitalize" @click="$emit('next')">
Next
</v-btn>
</v-card-actions>
@@ -107,7 +81,7 @@ import FileField from './FileField.vue'
export default Vue.extend({
components: {
ObjectField,
- FileField,
+ FileField
},
props: {
diff --git a/frontend/components/configAutoLabeling/form/ConfigTemplate.vue b/frontend/components/configAutoLabeling/form/ConfigTemplate.vue
index bb4833a47c..99432a870d 100644
--- a/frontend/components/configAutoLabeling/form/ConfigTemplate.vue
+++ b/frontend/components/configAutoLabeling/form/ConfigTemplate.vue
@@ -4,41 +4,33 @@
<v-card-text class="pa-0">
<h4 class="text-h6">Set mapping template</h4>
<p class="font-weight-regular body-1">
- Now, you can successfuly fetch the API response.
- Next, you need to convert API response to doccano format with the mapping template.
+ Now, you can successfuly fetch the API response. Next, you need to convert API response to
+ doccano format with the mapping template.
</p>
- <h4 class="text-h6">
- Response
- </h4>
- <v-sheet
- :dark="!$vuetify.theme.dark"
- :light="$vuetify.theme.dark"
- class="mb-5 pa-5"
- >
+ <h4 class="text-h6">Response</h4>
+ <v-sheet :dark="!$vuetify.theme.dark" :light="$vuetify.theme.dark" class="mb-5 pa-5">
<pre>{{ JSON.stringify(response, null, 4) }}</pre>
</v-sheet>
- <h4 class="text-h6">
- doccano format
- </h4>
- <v-sheet
- :dark="!$vuetify.theme.dark"
- :light="$vuetify.theme.dark"
- class="mb-5 pa-5"
- >
+ <h4 class="text-h6">doccano format</h4>
+ <v-sheet :dark="!$vuetify.theme.dark" :light="$vuetify.theme.dark" class="mb-5 pa-5">
<pre>Text Classification</pre>
<pre>[{ "label": "Cat" }, ...]</pre>
- <br>
+ <br />
<pre>Sequence Labeling</pre>
<pre>[{ "label": "Cat", "start_offset": 0, "end_offset": 5 }, ...]</pre>
- <br>
+ <br />
<pre>Sequence to sequence</pre>
<pre>[{ "text": "Cat" }, ...]</pre>
</v-sheet>
<h4 class="text-h6">Mapping template</h4>
<p class="font-weight-regular body-1">
- You can set mapping template(<a href="https://jinja.palletsprojects.com/en/2.11.x/">Jinja2</a> format) to convert API response to doccano format.
- In the template, you can refer to the API response by the <strong>input</strong> variable.
- If you want to know the Jinja2 notation, please refer to the site.
+ You can set mapping template(<a href="https://jinja.palletsprojects.com/en/2.11.x/"
+ >Jinja2</a
+ >
+ format) to convert API response to doccano format. In the template, you can refer to the
+ API response by the
+ <strong>input</strong> variable. If you want to know the Jinja2 notation, please refer to
+ the site.
</p>
<v-textarea
:value="value"
@@ -46,52 +38,25 @@
label="Mapping Template"
@change="$emit('input', $event)"
/>
- <v-alert
- v-for="(error, index) in errorMessages"
- :key="index"
- prominent
- type="error"
- >
+ <v-alert v-for="(error, index) in errorMessages" :key="index" prominent type="error">
<v-row align="center">
<v-col class="grow">
{{ error }}
</v-col>
</v-row>
</v-alert>
- <h4 class="text-h6">
- Result
- </h4>
- <v-sheet
- :dark="!$vuetify.theme.dark"
- :light="$vuetify.theme.dark"
- class="mb-5 pa-5"
- >
+ <h4 class="text-h6">Result</h4>
+ <v-sheet :dark="!$vuetify.theme.dark" :light="$vuetify.theme.dark" class="mb-5 pa-5">
<pre>{{ JSON.stringify(result, null, 4) }}</pre>
</v-sheet>
</v-card-text>
<v-card-actions class="pa-0">
<v-spacer />
- <v-btn
- text
- class="text-capitalize"
- @click="$emit('prev')"
- >
- Prev
- </v-btn>
- <v-btn
- v-show="!isPassed"
- color="primary"
- class="text-capitalize"
- @click="$emit('onTest')"
- >
+ <v-btn text class="text-capitalize" @click="$emit('prev')"> Prev </v-btn>
+ <v-btn v-show="!isPassed" color="primary" class="text-capitalize" @click="$emit('onTest')">
Test
</v-btn>
- <v-btn
- v-show="isPassed"
- color="primary"
- class="text-capitalize"
- @click="$emit('next')"
- >
+ <v-btn v-show="isPassed" color="primary" class="text-capitalize" @click="$emit('next')">
Next
</v-btn>
</v-card-actions>
diff --git a/frontend/components/configAutoLabeling/form/ConfigTemplateName.vue b/frontend/components/configAutoLabeling/form/ConfigTemplateName.vue
index dad2bf0e02..ceb4f909c6 100644
--- a/frontend/components/configAutoLabeling/form/ConfigTemplateName.vue
+++ b/frontend/components/configAutoLabeling/form/ConfigTemplateName.vue
@@ -7,12 +7,7 @@
<p class="font-weight-regular body-1">
You can select the template to create the auto-labeling configuration.{{ valid }}
</p>
- <v-select
- v-model="selectedTask"
- :items="taskNames"
- label="Select a task name"
- outlined
- />
+ <v-select v-model="selectedTask" :items="taskNames" label="Select a task name" outlined />
<v-select
v-model="templateName"
:items="templateNames"
@@ -24,12 +19,7 @@
</v-card-text>
<v-card-actions class="pa-0">
<v-spacer />
- <v-btn
- :disabled="!valid"
- color="primary"
- class="text-capitalize"
- @click="$emit('next')"
- >
+ <v-btn :disabled="!valid" color="primary" class="text-capitalize" @click="$emit('next')">
Next
</v-btn>
</v-card-actions>
@@ -66,12 +56,12 @@ export default Vue.extend({
taskType(): string {
return {
DocumentClassification: 'Category',
- SequenceLabeling : 'Span',
- Seq2seq : 'Text',
- ImageClassification : 'Category',
- Speech2text : 'Text',
+ SequenceLabeling: 'Span',
+ Seq2seq: 'Text',
+ ImageClassification: 'Category',
+ Speech2text: 'Text'
}[this.selectedTask]!
- }
+ }
},
watch: {
diff --git a/frontend/components/configAutoLabeling/form/FileField.vue b/frontend/components/configAutoLabeling/form/FileField.vue
index 30a74e0f33..bb348504a0 100644
--- a/frontend/components/configAutoLabeling/form/FileField.vue
+++ b/frontend/components/configAutoLabeling/form/FileField.vue
@@ -13,16 +13,14 @@
<script>
import Cookies from 'js-cookie'
-import vueFilePond from "vue-filepond"
-import "filepond/dist/filepond.min.css"
-import FilePondPluginFileValidateType from "filepond-plugin-file-validate-type"
-const FilePond = vueFilePond(
- FilePondPluginFileValidateType,
-)
+import vueFilePond from 'vue-filepond'
+import 'filepond/dist/filepond.min.css'
+import FilePondPluginFileValidateType from 'filepond-plugin-file-validate-type'
+const FilePond = vueFilePond(FilePondPluginFileValidateType)
export default {
components: {
- FilePond,
+ FilePond
},
props: {
@@ -32,18 +30,18 @@ export default {
required: true
}
},
-
+
data() {
return {
myFiles: [],
server: {
url: '/v1/fp',
headers: {
- 'X-CSRFToken': Cookies.get('csrftoken'),
+ 'X-CSRFToken': Cookies.get('csrftoken')
},
process: {
url: '/process/',
- method: 'POST',
+ method: 'POST'
},
patch: '/patch/',
revert: '/revert/',
@@ -62,6 +60,6 @@ export default {
handleFilePondRemovefile() {
this.$emit('input', '')
}
- },
-};
+ }
+}
</script>
diff --git a/frontend/components/configAutoLabeling/form/LabelMapping.vue b/frontend/components/configAutoLabeling/form/LabelMapping.vue
index c3b08509d9..d6c48005fe 100644
--- a/frontend/components/configAutoLabeling/form/LabelMapping.vue
+++ b/frontend/components/configAutoLabeling/form/LabelMapping.vue
@@ -1,23 +1,9 @@
<template>
- <v-data-table
- :headers="headers"
- :items="value"
- >
+ <v-data-table :headers="headers" :items="value">
<template #top>
- <v-dialog
- v-model="dialog"
- max-width="800px"
- >
+ <v-dialog v-model="dialog" max-width="800px">
<template #activator="{ on, attrs }">
- <v-btn
- color="primary"
- dark
- class="text-none"
- v-bind="attrs"
- v-on="on"
- >
- Add
- </v-btn>
+ <v-btn color="primary" dark class="text-none" v-bind="attrs" v-on="on"> Add </v-btn>
</template>
<v-card>
<v-card-title>
@@ -26,16 +12,9 @@
<v-card-text>
<v-container>
- <v-form
- ref="form"
- v-model="valid"
- >
+ <v-form ref="form" v-model="valid">
<v-row>
- <v-col
- cols="12"
- sm="12"
- class="pa-0"
- >
+ <v-col cols="12" sm="12" class="pa-0">
<v-text-field
v-model="editedItem.from"
label="From"
@@ -43,11 +22,7 @@
outlined
/>
</v-col>
- <v-col
- cols="12"
- sm="12"
- class="pa-0"
- >
+ <v-col cols="12" sm="12" class="pa-0">
<v-select
v-model="editedItem.to"
:items="items"
@@ -63,12 +38,7 @@
<v-card-actions>
<v-spacer />
- <v-btn
- color="blue darken-1"
- class="text-capitalize"
- text
- @click="close"
- >
+ <v-btn color="blue darken-1" class="text-capitalize" text @click="close">
Cancel
</v-btn>
<v-btn
@@ -85,17 +55,10 @@
</v-dialog>
</template>
<template #[`item.actions`]="{ item }">
- <v-icon
- small
- class="mr-2"
- @click="editItem(item)"
- >
+ <v-icon small class="mr-2" @click="editItem(item)">
{{ mdiPencil }}
</v-icon>
- <v-icon
- small
- @click="deleteItem(item)"
- >
+ <v-icon small @click="deleteItem(item)">
{{ mdiDelete }}
</v-icon>
</template>
@@ -140,12 +103,12 @@ export default Vue.extend({
valid: false,
editedIndex: -1,
editedItem: {
- 'from': '',
- 'to': ''
+ from: '',
+ to: ''
},
defaultItem: {
- 'from': '',
- 'to': ''
+ from: '',
+ to: ''
},
items: [] as string[],
labelNameRules,
@@ -158,21 +121,21 @@ export default Vue.extend({
const project = await this.$services.project.findById(this.$route.params.id)
if (project.projectType.endsWith('Classification')) {
const labels = await this.$services.categoryType.list(this.$route.params.id)
- this.items = labels.map(item => item.text)
+ this.items = labels.map((item) => item.text)
} else {
const labels = await this.$services.spanType.list(this.$route.params.id)
- this.items = labels.map(item => item.text)
+ this.items = labels.map((item) => item.text)
}
},
methods: {
- editItem(item: {'from': string, 'to': string}) {
+ editItem(item: { from: string; to: string }) {
this.editedIndex = this.value.indexOf(item)
this.editedItem = Object.assign({}, item)
this.dialog = true
},
- deleteItem(item: {'from': string, 'to': string}) {
+ deleteItem(item: { from: string; to: string }) {
this.editedIndex = this.value.indexOf(item)
this.editedItem = Object.assign({}, item)
const items = Object.assign([], this.value)
diff --git a/frontend/components/configAutoLabeling/form/ObjectField.vue b/frontend/components/configAutoLabeling/form/ObjectField.vue
index f1c4c225b1..bce8f1c586 100644
--- a/frontend/components/configAutoLabeling/form/ObjectField.vue
+++ b/frontend/components/configAutoLabeling/form/ObjectField.vue
@@ -1,31 +1,14 @@
<template>
- <v-data-table
- :headers="headers"
- :items="value"
- >
+ <v-data-table :headers="headers" :items="value">
<template #top>
- <v-toolbar
- class="toolbar-control"
- flat
- >
+ <v-toolbar class="toolbar-control" flat>
<v-toolbar-title class="text-capitalize">
{{ title }}
</v-toolbar-title>
<v-spacer />
- <v-dialog
- v-model="dialog"
- max-width="800px"
- >
+ <v-dialog v-model="dialog" max-width="800px">
<template #activator="{ on, attrs }">
- <v-btn
- color="primary"
- dark
- class="text-none"
- v-bind="attrs"
- v-on="on"
- >
- Add
- </v-btn>
+ <v-btn color="primary" dark class="text-none" v-bind="attrs" v-on="on"> Add </v-btn>
</template>
<v-card>
<v-card-title>
@@ -34,32 +17,13 @@
<v-card-text>
<v-container>
- <v-form
- ref="form"
- v-model="valid"
- >
+ <v-form ref="form" v-model="valid">
<v-row>
- <v-col
- cols="12"
- sm="12"
- class="pa-0"
- >
- <v-text-field
- v-model="editedItem.key"
- label="Key"
- outlined
- />
+ <v-col cols="12" sm="12" class="pa-0">
+ <v-text-field v-model="editedItem.key" label="Key" outlined />
</v-col>
- <v-col
- cols="12"
- sm="12"
- class="pa-0"
- >
- <v-text-field
- v-model="editedItem.value"
- label="Value"
- outlined
- />
+ <v-col cols="12" sm="12" class="pa-0">
+ <v-text-field v-model="editedItem.value" label="Value" outlined />
</v-col>
</v-row>
</v-form>
@@ -68,12 +32,7 @@
<v-card-actions>
<v-spacer />
- <v-btn
- color="blue darken-1"
- class="text-capitalize"
- text
- @click="close"
- >
+ <v-btn color="blue darken-1" class="text-capitalize" text @click="close">
Cancel
</v-btn>
<v-btn
@@ -91,17 +50,10 @@
</v-toolbar>
</template>
<template #[`item.actions`]="{ item }">
- <v-icon
- small
- class="mr-2"
- @click="editItem(item)"
- >
+ <v-icon small class="mr-2" @click="editItem(item)">
{{ mdiPencil }}
</v-icon>
- <v-icon
- small
- @click="deleteItem(item)"
- >
+ <v-icon small @click="deleteItem(item)">
{{ mdiDelete }}
</v-icon>
</template>
@@ -113,7 +65,6 @@ import Vue from 'vue'
import { mdiPencil, mdiDelete } from '@mdi/js'
export default Vue.extend({
-
props: {
value: {
type: Array,
@@ -151,12 +102,12 @@ export default Vue.extend({
valid: false,
editedIndex: -1,
editedItem: {
- 'key': '',
- 'value': ''
+ key: '',
+ value: ''
},
defaultItem: {
- 'key': '',
- 'value': ''
+ key: '',
+ value: ''
},
items: [] as string[],
mdiPencil,
@@ -165,13 +116,13 @@ export default Vue.extend({
},
methods: {
- editItem(item: {'key': string, 'value': string}) {
+ editItem(item: { key: string; value: string }) {
this.editedIndex = this.value.indexOf(item)
this.editedItem = Object.assign({}, item)
this.dialog = true
},
- deleteItem(item: {'key': string, 'value': string}) {
+ deleteItem(item: { key: string; value: string }) {
this.editedIndex = this.value.indexOf(item)
this.editedItem = Object.assign({}, item)
const items = Object.assign([], this.value)
@@ -207,4 +158,4 @@ export default Vue.extend({
.toolbar-control >>> .v-toolbar__content {
padding: 0px !important;
}
-</style>
\ No newline at end of file
+</style>
diff --git a/frontend/components/example/ActionMenu.vue b/frontend/components/example/ActionMenu.vue
index d4df5a8636..33b88cc512 100644
--- a/frontend/components/example/ActionMenu.vue
+++ b/frontend/components/example/ActionMenu.vue
@@ -20,17 +20,17 @@ export default Vue.extend({
computed: {
items() {
- return [
- {
- title: this.$t('dataset.importDataset'),
- icon: mdiUpload,
- event: 'upload'
- },
- {
- title: this.$t('dataset.exportDataset'),
- icon: mdiDownload,
- event: 'download'
- }
+ return [
+ {
+ title: this.$t('dataset.importDataset'),
+ icon: mdiUpload,
+ event: 'upload'
+ },
+ {
+ title: this.$t('dataset.exportDataset'),
+ icon: mdiDownload,
+ event: 'download'
+ }
]
}
}
diff --git a/frontend/components/example/AudioList.vue b/frontend/components/example/AudioList.vue
index e81df83f87..332df0c196 100644
--- a/frontend/components/example/AudioList.vue
+++ b/frontend/components/example/AudioList.vue
@@ -10,7 +10,7 @@
:loading-text="$t('generic.loading')"
:no-data-text="$t('vuetify.noDataAvailable')"
:footer-props="{
- 'showFirstLastPage': true,
+ showFirstLastPage: true,
'items-per-page-options': [10, 50, 100],
'items-per-page-text': $t('vuetify.itemsPerPageText'),
'page-text': $t('dataset.pageText')
@@ -30,11 +30,7 @@
/>
</template>
<template #[`item.fileUrl`]="{ item }">
- <audio
- controls
- :src="item.fileUrl"
- class="mt-2"
- >
+ <audio controls :src="item.fileUrl" class="mt-2">
Your browser does not support the
<code>audio</code> element.
</audio>
@@ -46,11 +42,7 @@
<span> {{ item.commentCount }} </span>
</template>
<template #[`item.action`]="{ item }">
- <v-btn
- small
- color="primary text-capitalize"
- @click="toLabeling(item)"
- >
+ <v-btn small color="primary text-capitalize" @click="toLabeling(item)">
{{ $t('dataset.annotate') }}
</v-btn>
</template>
diff --git a/frontend/components/example/DocumentList.vue b/frontend/components/example/DocumentList.vue
index fd7c3aff1b..e4424b2923 100644
--- a/frontend/components/example/DocumentList.vue
+++ b/frontend/components/example/DocumentList.vue
@@ -10,7 +10,7 @@
:loading-text="$t('generic.loading')"
:no-data-text="$t('vuetify.noDataAvailable')"
:footer-props="{
- 'showFirstLastPage': true,
+ showFirstLastPage: true,
'items-per-page-options': [10, 50, 100],
'items-per-page-text': $t('vuetify.itemsPerPageText'),
'page-text': $t('dataset.pageText')
@@ -40,11 +40,7 @@
<span> {{ item.commentCount }} </span>
</template>
<template #[`item.action`]="{ item }">
- <v-btn
- small
- color="primary text-capitalize"
- @click="toLabeling(item)"
- >
+ <v-btn small color="primary text-capitalize" @click="toLabeling(item)">
{{ $t('dataset.annotate') }}
</v-btn>
</template>
diff --git a/frontend/components/example/FormDelete.vue b/frontend/components/example/FormDelete.vue
index 52e4219c85..5f5488d3ff 100644
--- a/frontend/components/example/FormDelete.vue
+++ b/frontend/components/example/FormDelete.vue
@@ -1,7 +1,7 @@
<template>
<confirm-form
:title="$t('dataset.deleteDocumentsTitle')"
- :message="$t('dataset.deleteDocumentsMessage', { 'number': selected.length })"
+ :message="$t('dataset.deleteDocumentsMessage', { number: selected.length })"
@ok="$emit('remove')"
@cancel="$emit('cancel')"
/>
diff --git a/frontend/components/example/ImageList.vue b/frontend/components/example/ImageList.vue
index 39afa3cc9c..b85e369da2 100644
--- a/frontend/components/example/ImageList.vue
+++ b/frontend/components/example/ImageList.vue
@@ -10,7 +10,7 @@
:loading-text="$t('generic.loading')"
:no-data-text="$t('vuetify.noDataAvailable')"
:footer-props="{
- 'showFirstLastPage': true,
+ showFirstLastPage: true,
'items-per-page-options': [10, 50, 100],
'items-per-page-text': $t('vuetify.itemsPerPageText'),
'page-text': $t('dataset.pageText')
@@ -46,11 +46,7 @@
<span> {{ item.commentCount }} </span>
</template>
<template #[`item.action`]="{ item }">
- <v-btn
- small
- color="primary text-capitalize"
- @click="toLabeling(item)"
- >
+ <v-btn small color="primary text-capitalize" @click="toLabeling(item)">
{{ $t('dataset.annotate') }}
</v-btn>
</template>
diff --git a/frontend/components/label/ActionMenu.vue b/frontend/components/label/ActionMenu.vue
index 008b512fb3..229ce73ffa 100644
--- a/frontend/components/label/ActionMenu.vue
+++ b/frontend/components/label/ActionMenu.vue
@@ -20,7 +20,7 @@ export default Vue.extend({
computed: {
items() {
- return [
+ return [
{
title: this.$t('labels.createLabel'),
icon: mdiPencil,
diff --git a/frontend/components/label/FormCreate.vue b/frontend/components/label/FormCreate.vue
index 69f216d0d6..5421ee8331 100644
--- a/frontend/components/label/FormCreate.vue
+++ b/frontend/components/label/FormCreate.vue
@@ -2,10 +2,7 @@
<v-card>
<v-card-title>Create a Label Type</v-card-title>
<v-card-text>
- <v-form
- ref="form"
- v-model="valid"
- >
+ <v-form ref="form" v-model="valid">
<v-row>
<v-col cols="12" sm="6">
<v-text-field
@@ -41,27 +38,18 @@
required
@input="$emit('update:backgroundColor', $event)"
/>
- <v-chip-group
- v-model="selectedColorIndex"
- column
- mandatory
- >
+ <v-chip-group v-model="selectedColorIndex" column mandatory>
<v-chip
v-for="color in predefinedColors"
:key="color"
:color="color"
filter
label
- style="height: 32px; width: 32px;"
+ style="height: 32px; width: 32px"
/>
<v-tooltip bottom>
<template #activator="{ on, attrs }">
- <v-chip
- label
- v-bind="attrs"
- v-on="on"
- @click="setRandomColor"
- >
+ <v-chip label v-bind="attrs" v-on="on" @click="setRandomColor">
<v-icon>{{ mdiReload }}</v-icon>
</v-chip>
</template>
@@ -74,17 +62,9 @@
<v-row>
<v-col>
<div class="title black--text mb-2">Preview</div>
- <v-chip
- :color="backgroundColor"
- :text-color="textColor"
- >
+ <v-chip :color="backgroundColor" :text-color="textColor">
{{ text }}
- <v-avatar
- v-if="suffixKey"
- right
- color="white"
- class="black--text font-weight-bold"
- >
+ <v-avatar v-if="suffixKey" right color="white" class="black--text font-weight-bold">
{{ suffixKey }}
</v-avatar>
</v-chip>
@@ -103,7 +83,7 @@
<script lang="ts">
import Vue, { PropType } from 'vue'
-import { mdiReload } from '@mdi/js';
+import { mdiReload } from '@mdi/js'
import { LabelDTO } from '~/services/application/label/labelData'
export default Vue.extend({
@@ -119,11 +99,11 @@ export default Vue.extend({
},
text: {
type: String,
- required: true,
+ required: true
},
backgroundColor: {
type: String,
- required: true,
+ required: true
},
suffixKey: {
type: String as () => string | null,
@@ -137,13 +117,17 @@ export default Vue.extend({
valid: false,
rules: {
required: (v: string) => !!v || 'Required',
- // @ts-ignore
- counter: (v: string) => (v && v.length <= 100) || this.$t('rules.labelNameRules').labelLessThan100Chars,
- // @ts-ignore
- nameDuplicated: (v: string) => (!this.isUsedName(v)) || this.$t('rules.labelNameRules').duplicated,
- // @ts-ignore
- keyDuplicated: (v: string) => (!this.isUsedSuffixKey(v)) || this.$t('rules.keyNameRules').duplicated,
- validColor: (v: string) => (/^#[0-9A-F]{6}$/i.test(v)) || 'This string is NOT a valid hex color.'
+ counter: (
+ v: string // @ts-ignore
+ ) => (v && v.length <= 100) || this.$t('rules.labelNameRules').labelLessThan100Chars,
+ nameDuplicated: (
+ v: string // @ts-ignore
+ ) => !this.isUsedName(v) || this.$t('rules.labelNameRules').duplicated,
+ keyDuplicated: (
+ v: string // @ts-ignore
+ ) => !this.isUsedSuffixKey(v) || this.$t('rules.keyNameRules').duplicated,
+ validColor: (v: string) =>
+ /^#[0-9A-F]{6}$/i.test(v) || 'This string is NOT a valid hex color.'
},
mdiReload
}
@@ -151,20 +135,36 @@ export default Vue.extend({
computed: {
availableSuffixKeys(): string[] {
- const usedSuffixKeys = this.items.map(item => item.suffixKey).filter(item => item !== this.suffixKey)
+ const usedSuffixKeys = this.items
+ .map((item) => item.suffixKey)
+ .filter((item) => item !== this.suffixKey)
const allSuffixKeys = '0123456789abcdefghijklmnopqrstuvwxyz'.split('')
- return allSuffixKeys.filter(item => !usedSuffixKeys.includes(item))
+ return allSuffixKeys.filter((item) => !usedSuffixKeys.includes(item))
},
predefinedColors(): string[] {
return [
- '#73D8FF', '#009CE0', '#0062B1',
- '#AEA1FF', '#7B64FF', '#653294',
- '#FDA1FF', '#FA28FF', '#AB149E',
- '#68CCCA', '#16A5A5', '#0C797D',
- '#A4DD00', '#68BC00', '#194D33',
- '#FCDC00', '#FCC400', '#FB9E00',
- '#F44E3B', '#D33115', '#9F0500'
+ '#73D8FF',
+ '#009CE0',
+ '#0062B1',
+ '#AEA1FF',
+ '#7B64FF',
+ '#653294',
+ '#FDA1FF',
+ '#FA28FF',
+ '#AB149E',
+ '#68CCCA',
+ '#16A5A5',
+ '#0C797D',
+ '#A4DD00',
+ '#68BC00',
+ '#194D33',
+ '#FCDC00',
+ '#FCC400',
+ '#FB9E00',
+ '#F44E3B',
+ '#D33115',
+ '#9F0500'
]
},
@@ -183,18 +183,18 @@ export default Vue.extend({
methods: {
isUsedName(text: string): boolean {
- return this.items.filter(item => item.id !== this.id && item.text === text).length > 0
+ return this.items.filter((item) => item.id !== this.id && item.text === text).length > 0
},
isUsedSuffixKey(key: string) {
if (key === null) {
return false
}
- return this.items.filter(item => item.id !== this.id && item.suffixKey === key).length > 0
+ return this.items.filter((item) => item.id !== this.id && item.suffixKey === key).length > 0
},
setRandomColor() {
- const maxVal = 0xFFFFFF
+ const maxVal = 0xffffff
const randomNumber = Math.floor(Math.random() * maxVal)
const randomString = randomNumber.toString(16)
const randColor = randomString.padStart(6, '0')
diff --git a/frontend/components/label/FormImport.vue b/frontend/components/label/FormImport.vue
index aa1d2e5a0d..d80f870d6d 100644
--- a/frontend/components/label/FormImport.vue
+++ b/frontend/components/label/FormImport.vue
@@ -2,10 +2,7 @@
<v-card>
<v-card-title v-text="$t('labels.importLabels')" />
<v-card-text>
- <v-form
- ref="form"
- v-model="valid"
- >
+ <v-form ref="form" v-model="valid">
<h3>{{ $t('labels.importMessage1') }}</h3>
<v-sheet
v-if="exampleFormat"
@@ -55,7 +52,7 @@ export default Vue.extend({
return {
file: null,
valid: false,
- uploadSingleFileRules,
+ uploadSingleFileRules
}
},
diff --git a/frontend/components/label/LabelList.vue b/frontend/components/label/LabelList.vue
index 74d568b2ce..e89419ea22 100644
--- a/frontend/components/label/LabelList.vue
+++ b/frontend/components/label/LabelList.vue
@@ -8,7 +8,7 @@
:loading-text="$t('generic.loading')"
:no-data-text="$t('vuetify.noDataAvailable')"
:footer-props="{
- 'showFirstLastPage': true,
+ showFirstLastPage: true,
'items-per-page-text': $t('vuetify.itemsPerPageText'),
'page-text': $t('dataset.pageText')
}"
@@ -35,10 +35,7 @@
</v-chip>
</template>
<template #[`item.actions`]="{ item }">
- <v-icon
- small
- @click="$emit('edit', item)"
- >
+ <v-icon small @click="$emit('edit', item)">
{{ mdiPencil }}
</v-icon>
</template>
@@ -80,10 +77,10 @@ export default Vue.extend({
computed: {
headers() {
return [
- { text: this.$t('generic.name'), value: 'text' },
+ { text: this.$t('generic.name'), value: 'text' },
{ text: this.$t('labels.shortkey'), value: 'suffixKey' },
- { text: this.$t('labels.color'), value: 'backgroundColor' },
- { text: 'Actions', value: 'actions', sortable: false },
+ { text: this.$t('labels.color'), value: 'backgroundColor' },
+ { text: 'Actions', value: 'actions', sortable: false }
]
}
}
diff --git a/frontend/components/layout/FeatureCard.vue b/frontend/components/layout/FeatureCard.vue
index 8c3515fa95..98b5e37b61 100644
--- a/frontend/components/layout/FeatureCard.vue
+++ b/frontend/components/layout/FeatureCard.vue
@@ -1,9 +1,6 @@
<template>
<v-card>
- <v-img
- :src="imageSrc"
- height="200px"
- />
+ <v-img :src="imageSrc" height="200px" />
<v-card-title primary-title class="layout justify-center">
<div class="headline text-xs-center font-weight-bold mb-2">
{{ title }}
diff --git a/frontend/components/layout/FeatureCards.vue b/frontend/components/layout/FeatureCards.vue
index 316f4e3b02..715cdb1da1 100644
--- a/frontend/components/layout/FeatureCards.vue
+++ b/frontend/components/layout/FeatureCards.vue
@@ -1,11 +1,6 @@
<template>
<v-container>
- <v-layout
- column
- wrap
- class="my-5"
- align-center
- >
+ <v-layout column wrap class="my-5" align-center>
<v-flex xs12 sm4 class="my-3">
<div class="text-xs-center">
<h2 class="headline">
@@ -16,12 +11,7 @@
<v-flex xs12>
<v-container grid-list-xl>
<v-layout wrap align-center>
- <v-flex
- v-for="(item, index) in featureCards"
- :key="index"
- xs12
- md4
- >
+ <v-flex v-for="(item, index) in featureCards" :key="index" xs12 md4>
<feature-card
:image-src="require(`~/assets/${item.imageSrc}`)"
:title="item.title"
diff --git a/frontend/components/layout/LocaleMenu.vue b/frontend/components/layout/LocaleMenu.vue
index c7d93a5207..0eba373591 100644
--- a/frontend/components/layout/LocaleMenu.vue
+++ b/frontend/components/layout/LocaleMenu.vue
@@ -1,21 +1,12 @@
<template>
- <v-menu
- open-on-hover
- offset-y
- >
+ <v-menu open-on-hover offset-y>
<template #activator="{ on }">
- <v-btn
- text
- v-on="on"
- >
+ <v-btn text v-on="on">
{{ $i18n.locale }}
<v-icon>{{ mdiMenuDown }}</v-icon>
</v-btn>
</template>
- <v-list
- v-for="locale in $i18n.locales"
- :key="locale.code"
- >
+ <v-list v-for="locale in $i18n.locales" :key="locale.code">
<nuxt-link
class="v-list-item v-list-item--link"
:class="$vuetify.theme.dark ? 'theme--dark' : 'theme--light'"
diff --git a/frontend/components/layout/TheBottomBanner.vue b/frontend/components/layout/TheBottomBanner.vue
index 6c39d53d8e..7da1136667 100644
--- a/frontend/components/layout/TheBottomBanner.vue
+++ b/frontend/components/layout/TheBottomBanner.vue
@@ -1,44 +1,19 @@
<template>
<section>
- <v-parallax
- :src="require(`~/assets/vbanner.jpg`)"
- height="400"
- dark
- >
+ <v-parallax :src="require(`~/assets/vbanner.jpg`)" height="400" dark>
<v-container>
- <v-layout
- wrap
- align-center
- justify-center
- class="white--text"
- >
- <v-flex
- xs12
- md7
- >
- <v-img
- :src="require(`~/assets/ner_demo.png`)"
- max-height="380"
- contain
- class="ma-5"
- />
+ <v-layout wrap align-center justify-center class="white--text">
+ <v-flex xs12 md7>
+ <v-img :src="require(`~/assets/ner_demo.png`)" max-height="380" contain class="ma-5" />
</v-flex>
- <v-flex
- xs12
- md5
- >
+ <v-flex xs12 md5>
<h1 class="mb-2 display-1 text-xs-center">
{{ $t('home.footerTitle') }}
</h1>
<div class="mt-4">
<v-menu open-on-hover offset-y>
<template #activator="{ on }">
- <v-btn
- class="blue lighten-2"
- dark
- large
- v-on="on"
- >
+ <v-btn class="blue lighten-2" dark large v-on="on">
{{ $t('home.demoDropDown') }}
<v-icon>{{ mdiMenuDown }}</v-icon>
</v-btn>
diff --git a/frontend/components/layout/TheColorModeSwitcher.vue b/frontend/components/layout/TheColorModeSwitcher.vue
index b9e4440e8f..cef3b17b89 100644
--- a/frontend/components/layout/TheColorModeSwitcher.vue
+++ b/frontend/components/layout/TheColorModeSwitcher.vue
@@ -1,9 +1,5 @@
<template>
- <v-btn
- icon
- fab
- @click="isDark=!isDark"
- >
+ <v-btn icon fab @click="isDark = !isDark">
<v-icon v-if="isDark">
{{ mdiMoonWaxingCrescent }}
</v-icon>
diff --git a/frontend/components/layout/TheFooter.vue b/frontend/components/layout/TheFooter.vue
index 9e2923e624..fa307fc849 100644
--- a/frontend/components/layout/TheFooter.vue
+++ b/frontend/components/layout/TheFooter.vue
@@ -1,20 +1,7 @@
<template>
- <v-footer
- color="primary lighten-1"
- padless
- >
- <v-layout
- justify-center
- wrap
- >
- <v-flex
- black
- lighten-2
- py-4
- text-center
- white--text
- xs12
- >
+ <v-footer color="primary lighten-1" padless>
+ <v-layout justify-center wrap>
+ <v-flex black lighten-2 py-4 text-center white--text xs12>
© {{ new Date().getFullYear() }} doccano
</v-flex>
</v-layout>
diff --git a/frontend/components/layout/TheHeader.vue b/frontend/components/layout/TheHeader.vue
index 84d6bb711d..9c67745844 100644
--- a/frontend/components/layout/TheHeader.vue
+++ b/frontend/components/layout/TheHeader.vue
@@ -1,27 +1,17 @@
<template>
- <v-app-bar
- app
- clipped-left
- >
+ <v-app-bar app clipped-left>
<slot name="leftDrawerIcon" />
- <nuxt-link
- v-if="!isAuthenticated"
- to="/"
- style="line-height:0;"
- >
- <img src="~/assets/icon.png" height="48">
+ <nuxt-link v-if="!isAuthenticated" to="/" style="line-height: 0">
+ <img src="~/assets/icon.png" height="48" />
</nuxt-link>
- <v-toolbar-title
- v-if="!isAuthenticated"
- class="ml-2 d-none d-sm-flex"
- >
+ <v-toolbar-title v-if="!isAuthenticated" class="ml-2 d-none d-sm-flex">
doccano
</v-toolbar-title>
<v-btn
v-if="isAuthenticated && isIndividualProject"
text
class="d-none d-sm-flex"
- style="text-transform:none"
+ style="text-transform: none"
>
<v-icon small class="mr-1">
{{ mdiHexagonMultiple }}
@@ -39,16 +29,9 @@
>
{{ $t('header.projects') }}
</v-btn>
- <v-menu
- v-if="!isAuthenticated"
- open-on-hover
- offset-y
- >
+ <v-menu v-if="!isAuthenticated" open-on-hover offset-y>
<template #activator="{ on }">
- <v-btn
- text
- v-on="on"
- >
+ <v-btn text v-on="on">
{{ $t('home.demoDropDown') }}
<v-icon>{{ mdiMenuDown }}</v-icon>
</v-btn>
@@ -63,17 +46,10 @@
</v-list-item>
</v-list>
</v-menu>
- <v-btn
- v-if="!isAuthenticated"
- outlined
- @click="$router.push(localePath('/auth'))"
- >
+ <v-btn v-if="!isAuthenticated" outlined @click="$router.push(localePath('/auth'))">
{{ $t('user.login') }}
</v-btn>
- <v-menu
- v-if="isAuthenticated"
- offset-y
- >
+ <v-menu v-if="isAuthenticated" offset-y>
<template #activator="{ on }">
<v-btn on icon v-on="on">
<v-icon>{{ mdiDotsVertical }}</v-icon>
@@ -83,12 +59,7 @@
<v-subheader>{{ getUsername }}</v-subheader>
<v-list-item>
<v-list-item-content>
- <v-switch
- :input-value="isRTL"
- :label="direction"
- class="ms-1"
- @change="toggleRTL"
- />
+ <v-switch :input-value="isRTL" :label="direction" class="ms-1" @change="toggleRTL" />
</v-list-item-content>
</v-list-item>
<v-list-item @click="signout">
@@ -124,10 +95,13 @@ export default {
{ title: this.$t('home.demoNER'), link: 'named-entity-recognition' },
{ title: this.$t('home.demoSent'), link: 'sentiment-analysis' },
{ title: this.$t('home.demoTranslation'), link: 'translation' },
- { title: 'Intent Detection and Slot Filling', link: 'intent-detection-and-slot-filling' },
+ {
+ title: 'Intent Detection and Slot Filling',
+ link: 'intent-detection-and-slot-filling'
+ },
{ title: this.$t('home.demoTextToSQL'), link: 'text-to-sql' },
{ title: 'Image Classification', link: 'image-classification' },
- { title: 'Speech to Text', link: 'speech-to-text' },
+ { title: 'Speech to Text', link: 'speech-to-text' }
],
mdiLogout,
mdiDotsVertical,
diff --git a/frontend/components/layout/TheSideBar.vue b/frontend/components/layout/TheSideBar.vue
index 3ea649d1b7..1890f615a2 100644
--- a/frontend/components/layout/TheSideBar.vue
+++ b/frontend/components/layout/TheSideBar.vue
@@ -1,19 +1,12 @@
<template>
<v-list dense>
- <v-btn
- color="ms-4 my-1 mb-2 primary text-capitalize"
- nuxt
- @click="toLabeling"
- >
+ <v-btn color="ms-4 my-1 mb-2 primary text-capitalize" nuxt @click="toLabeling">
<v-icon left>
{{ mdiPlayCircleOutline }}
</v-icon>
{{ $t('home.startAnnotation') }}
</v-btn>
- <v-list-item-group
- v-model="selected"
- mandatory
- >
+ <v-list-item-group v-model="selected" mandatory>
<v-list-item
v-for="(item, i) in filteredItems"
:key="i"
@@ -35,7 +28,17 @@
</template>
<script>
-import { mdiHome, mdiDatabase, mdiCog, mdiChartBar, mdiBookOpenOutline, mdiCommentAccountOutline, mdiLabel, mdiAccount, mdiPlayCircleOutline } from '@mdi/js'
+import {
+ mdiHome,
+ mdiDatabase,
+ mdiCog,
+ mdiChartBar,
+ mdiBookOpenOutline,
+ mdiCommentAccountOutline,
+ mdiLabel,
+ mdiAccount,
+ mdiPlayCircleOutline
+} from '@mdi/js'
export default {
props: {
@@ -121,7 +124,7 @@ export default {
isVisible: this.isProjectAdmin
}
]
- return items.filter(item => item.isVisible)
+ return items.filter((item) => item.isVisible)
}
},
diff --git a/frontend/components/layout/TheTopBanner.vue b/frontend/components/layout/TheTopBanner.vue
index c993e48121..9f2507e2b5 100644
--- a/frontend/components/layout/TheTopBanner.vue
+++ b/frontend/components/layout/TheTopBanner.vue
@@ -1,41 +1,22 @@
<template>
<section>
- <v-parallax
- :src="require(`~/assets/vbanner.jpg`)"
- height="400"
- dark
- >
- <v-layout
- wrap
- align-center
- justify-center
- class="white--text"
- >
+ <v-parallax :src="require(`~/assets/vbanner.jpg`)" height="400" dark>
+ <v-layout wrap align-center justify-center class="white--text">
<v-flex text-right class="mr-5">
- <img src="~/assets/icon.png" alt="doccano" height="200">
+ <img src="~/assets/icon.png" alt="doccano" height="200" />
</v-flex>
<v-flex>
<h1 class="mb-2 display-1 text-xs-center">
{{ $t('home.mainTitle') }}
</h1>
<div class="mt-4">
- <v-btn
- large
- outlined
- color="white"
- href="https://github.com/doccano/doccano"
- >
+ <v-btn large outlined color="white" href="https://github.com/doccano/doccano">
<v-icon left>
{{ mdiGithub }}
</v-icon>
GitHub
</v-btn>
- <v-btn
- class="blue lighten-2 ml-5"
- dark
- large
- :href="localePath('/auth')"
- >
+ <v-btn class="blue lighten-2 ml-5" dark large :href="localePath('/auth')">
{{ $t('home.getStarted') }}
</v-btn>
</div>
diff --git a/frontend/components/member/FormCreate.vue b/frontend/components/member/FormCreate.vue
index b3e9268d2b..7bd7378c5f 100644
--- a/frontend/components/member/FormCreate.vue
+++ b/frontend/components/member/FormCreate.vue
@@ -39,11 +39,7 @@
{{ $translateRole(props.item.rolename, $t('members.roles')) }}
</template>
</v-select>
- <v-alert
- v-show="errorMessage"
- prominent
- type="error"
- >
+ <v-alert v-show="errorMessage" prominent type="error">
<v-row align="center">
<v-col class="grow">
{{ errorMessage }}
@@ -67,7 +63,7 @@ export default Vue.extend({
components: {
BaseCard
},
-
+
props: {
value: {
type: Object as PropType<MemberDTO>,
@@ -87,8 +83,8 @@ export default Vue.extend({
roles: [] as RoleDTO[],
username: '',
rules: {
- userRequired: (v: UserDTO) => !!v && !!v.username || 'Required',
- roleRequired: (v: RoleDTO) => !!v && !!v.rolename || 'Required'
+ userRequired: (v: UserDTO) => (!!v && !!v.username) || 'Required',
+ roleRequired: (v: RoleDTO) => (!!v && !!v.rolename) || 'Required'
},
mdiAccount,
mdiCreditCardOutline
diff --git a/frontend/components/member/MemberList.vue b/frontend/components/member/MemberList.vue
index 67201275be..dee4733ec2 100644
--- a/frontend/components/member/MemberList.vue
+++ b/frontend/components/member/MemberList.vue
@@ -8,7 +8,7 @@
:loading-text="$t('generic.loading')"
:no-data-text="$t('vuetify.noDataAvailable')"
:footer-props="{
- 'showFirstLastPage': true,
+ showFirstLastPage: true,
'items-per-page-text': $t('vuetify.itemsPerPageText'),
'page-text': $t('dataset.pageText')
}"
@@ -30,10 +30,7 @@
{{ $translateRole(item.rolename, $t('members.roles')) }}
</template>
<template #[`item.actions`]="{ item }">
- <v-icon
- small
- @click="$emit('edit', item)"
- >
+ <v-icon small @click="$emit('edit', item)">
{{ mdiPencil }}
</v-icon>
</template>
@@ -78,6 +75,6 @@ export default Vue.extend({
{ text: 'Actions', value: 'actions', sortable: false }
]
}
- }
+ }
})
</script>
diff --git a/frontend/components/metrics/LabelDistribution.vue b/frontend/components/metrics/LabelDistribution.vue
index 965a7217a9..05184d0ce3 100644
--- a/frontend/components/metrics/LabelDistribution.vue
+++ b/frontend/components/metrics/LabelDistribution.vue
@@ -3,21 +3,12 @@
<v-card-title v-text="title" />
<v-divider />
<v-tabs show-arrows>
- <v-tab
- v-for="(value, user) in chartJSFormat"
- :key="user"
- class="text-capitalize"
- >
+ <v-tab v-for="(value, user) in chartJSFormat" :key="user" class="text-capitalize">
{{ user }}
</v-tab>
- <v-tab-item
- v-for="(value, user) in chartJSFormat"
- :key="user"
- >
+ <v-tab-item v-for="(value, user) in chartJSFormat" :key="user">
<v-card-text>
- <bar-chart
- :chart-data="value"
- />
+ <bar-chart :chart-data="value" />
</v-card-text>
</v-tab-item>
</v-tabs>
@@ -48,29 +39,35 @@ export default Vue.extend({
labelTypes: {
type: Array as PropType<LabelDTO[]>,
default: () => [],
- required: true,
- },
+ required: true
+ }
},
computed: {
- colorMapping(): {[text: string]: string} {
- return Object.fromEntries(this.labelTypes.map((labelType) => [labelType.text, labelType.backgroundColor]))
+ colorMapping(): { [text: string]: string } {
+ return Object.fromEntries(
+ this.labelTypes.map((labelType) => [labelType.text, labelType.backgroundColor])
+ )
},
chartJSFormat(): any {
- const data: {[user: string]: {labels: string[], datasets: any[]}} = {}
+ const data: { [user: string]: { labels: string[]; datasets: any[] } } = {}
for (const user in this.distribution) {
const labels = Object.keys(this.distribution[user])
labels.sort()
const counts = labels.map((label) => this.distribution[user][label])
- const colors = labels.map((label) => label in this.colorMapping ? this.colorMapping[label] : '#00d1b2')
+ const colors = labels.map((label) =>
+ label in this.colorMapping ? this.colorMapping[label] : '#00d1b2'
+ )
data[user] = {
labels,
- datasets: [{
- title: this.title,
- backgroundColor: colors,
- data: counts
- }]
+ datasets: [
+ {
+ title: this.title,
+ backgroundColor: colors,
+ data: counts
+ }
+ ]
}
}
return data
diff --git a/frontend/components/metrics/MemberProgress.vue b/frontend/components/metrics/MemberProgress.vue
index 8b5263e49d..04d23297d6 100644
--- a/frontend/components/metrics/MemberProgress.vue
+++ b/frontend/components/metrics/MemberProgress.vue
@@ -3,11 +3,7 @@
<v-card-title>Member's Progress</v-card-title>
<v-divider />
<v-card-text>
- <div
- v-for="(item, index) in stats.progress"
- :key="index"
- class="mb-2"
- >
+ <div v-for="(item, index) in stats.progress" :key="index" class="mb-2">
<span class="font-weight-medium">{{ item.user }}</span>
<span class="font-weight-medium">{{ item.done }} / {{ stats.total }}</span>
<v-progress-linear :value="rate(item.done, stats.total)" />
@@ -31,7 +27,7 @@ export default Vue.extend({
methods: {
rate(done: number, total: number) {
- return done / total * 100
+ return (done / total) * 100
}
}
})
diff --git a/frontend/components/project/FormCreate.vue b/frontend/components/project/FormCreate.vue
index c722ce3100..9014a86d32 100644
--- a/frontend/components/project/FormCreate.vue
+++ b/frontend/components/project/FormCreate.vue
@@ -11,11 +11,7 @@
<v-row no-gutters>
<v-col v-for="(item, i) in projectTypes" :key="i">
<v-item v-slot="{ active, toggle }">
- <v-card
- class="mb-6 me-6"
- max-width="350"
- outlined
- >
+ <v-card class="mb-6 me-6" max-width="350" outlined>
<v-img
:src="require(`~/assets/images/tasks/${images[i]}`)"
height="200"
@@ -33,7 +29,7 @@
</v-col>
</v-row>
</v-item-group>
-
+
<v-text-field
:value="name"
:rules="projectNameRules($t('rules.projectNameRules'))"
@@ -98,12 +94,7 @@
Count
<v-tooltip bottom>
<template #activator="{ on }">
- <a
- target="_blank"
- href="https://unicode.org/reports/tr29/"
- @click.stop
- v-on="on"
- >
+ <a target="_blank" href="https://unicode.org/reports/tr29/" @click.stop v-on="on">
grapheme clusters
</a>
</template>
@@ -190,7 +181,7 @@ export default Vue.extend({
},
tags: {
type: Array,
- default: () => [],
+ default: () => []
}
},
@@ -201,7 +192,7 @@ export default Vue.extend({
projectTypeRules,
descriptionRules,
mdiCheckBold,
- selected: 0,
+ selected: 0
}
},
@@ -213,7 +204,7 @@ export default Vue.extend({
'Seq2seq',
'IntentDetectionAndSlotFilling',
'ImageClassification',
- 'Speech2text',
+ 'Speech2text'
]
},
images() {
@@ -227,10 +218,7 @@ export default Vue.extend({
]
},
hasSingleLabelOption() {
- return [
- 'DocumentClassification',
- 'ImageClassification',
- ].includes(this.projectType)
+ return ['DocumentClassification', 'ImageClassification'].includes(this.projectType)
},
isSequenceLabelingProject() {
return this.projectType === 'SequenceLabeling'
@@ -239,7 +227,7 @@ export default Vue.extend({
methods: {
updateValue(key: string, value: string) {
- this.$emit(`update:${key}`, value);
+ this.$emit(`update:${key}`, value)
},
translateTypeName(type: string, types: string[]): string {
const index = this.projectTypes.indexOf(type)
diff --git a/frontend/components/project/FormUpdate.vue b/frontend/components/project/FormUpdate.vue
index 1a87d05765..72d003a575 100644
--- a/frontend/components/project/FormUpdate.vue
+++ b/frontend/components/project/FormUpdate.vue
@@ -1,15 +1,9 @@
<template>
<v-card>
<v-card-text v-if="isReady">
- <v-form
- ref="form"
- v-model="valid"
- >
+ <v-form ref="form" v-model="valid">
<v-row>
- <v-col
- cols="12"
- sm="6"
- >
+ <v-col cols="12" sm="6">
<h3>Name</h3>
<v-text-field
v-model="project.name"
@@ -19,10 +13,7 @@
single-line
/>
</v-col>
- <v-col
- cols="12"
- sm="6"
- >
+ <v-col cols="12" sm="6">
<v-btn
v-if="!edit.name"
outlined
@@ -53,10 +44,7 @@
</v-col>
</v-row>
<v-row>
- <v-col
- cols="12"
- sm="6"
- >
+ <v-col cols="12" sm="6">
<h3>Description</h3>
<v-text-field
v-model="project.description"
@@ -66,10 +54,7 @@
single-line
/>
</v-col>
- <v-col
- cols="12"
- sm="6"
- >
+ <v-col cols="12" sm="6">
<v-btn
v-if="!edit.desc"
outlined
@@ -100,32 +85,28 @@
</v-col>
</v-row>
<v-row>
- <v-col
- cols="12"
- sm="6"
- >
+ <v-col cols="12" sm="6">
<h3>Tags</h3>
<v-chip
v-for="tag in tags"
:key="tag.id"
close
outlined
- @click:close="removeTag(tag.id)">{{tag.text}}
+ @click:close="removeTag(tag.id)"
+ >{{ tag.text }}
</v-chip>
<v-text-field
v-model="tagInput"
clearable
:prepend-icon="mdiPlusCircle"
@keyup.enter="addTag()"
- @click:prepend="addTag()">
+ @click:prepend="addTag()"
+ >
</v-text-field>
</v-col>
</v-row>
<v-row>
- <v-col
- cols="12"
- sm="6"
- >
+ <v-col cols="12" sm="6">
<h3>Shuffle</h3>
<v-checkbox
v-model="project.enableRandomOrder"
@@ -134,10 +115,7 @@
</v-col>
</v-row>
<v-row>
- <v-col
- cols="12"
- sm="6"
- >
+ <v-col cols="12" sm="6">
<h3>Collaboration</h3>
<v-checkbox
v-model="project.enableShareAnnotation"
@@ -155,7 +133,6 @@ import { mdiPlusCircle } from '@mdi/js'
import { projectNameRules, descriptionRules } from '@/rules/index'
export default {
-
data() {
return {
project: {},
@@ -197,7 +174,9 @@ export default {
methods: {
initEdit() {
- Object.keys(this.edit).forEach((v) => { this.edit[v] = false })
+ Object.keys(this.edit).forEach((v) => {
+ this.edit[v] = false
+ })
},
editProject(name) {
@@ -229,19 +208,19 @@ export default {
return this.$refs.form.validate()
},
- async getTags(){
- this.tags = await this.$services.tag.list(this.projectId)
+ async getTags() {
+ this.tags = await this.$services.tag.list(this.projectId)
},
- addTag(){
+ addTag() {
this.$services.tag.create(this.projectId, this.tagInput)
this.tagInput = ''
this.getTags()
},
- removeTag(id){
+ removeTag(id) {
this.$services.tag.delete(this.projectId, id)
- this.tags = this.tags.filter(tag => tag.id !== id)
+ this.tags = this.tags.filter((tag) => tag.id !== id)
}
}
}
diff --git a/frontend/components/project/ProjectList.vue b/frontend/components/project/ProjectList.vue
index 7e129cbdb9..07379e2179 100644
--- a/frontend/components/project/ProjectList.vue
+++ b/frontend/components/project/ProjectList.vue
@@ -10,7 +10,7 @@
:loading-text="$t('generic.loading')"
:no-data-text="$t('vuetify.noDataAvailable')"
:footer-props="{
- 'showFirstLastPage': true,
+ showFirstLastPage: true,
'items-per-page-options': [10, 50, 100],
'items-per-page-text': $t('vuetify.itemsPerPageText'),
'page-text': $t('dataset.pageText')
@@ -35,14 +35,12 @@
</nuxt-link>
</template>
<template #[`item.updatedAt`]="{ item }">
- <span>{{ item.updatedAt | dateParse('YYYY-MM-DDTHH:mm:ss') | dateFormat('DD/MM/YYYY HH:mm') }}</span>
+ <span>{{
+ item.updatedAt | dateParse('YYYY-MM-DDTHH:mm:ss') | dateFormat('DD/MM/YYYY HH:mm')
+ }}</span>
</template>
<template #[`item.tags`]="{ item }">
- <v-chip
- v-for="tag in item.tags"
- :key="tag.id"
- outlined v-text="tag.text"
- />
+ <v-chip v-for="tag in item.tags" :key="tag.id" outlined v-text="tag.text" />
</template>
</v-data-table>
</template>
@@ -96,7 +94,7 @@ export default Vue.extend({
{ text: this.$t('generic.description'), value: 'description' },
{ text: this.$t('generic.type'), value: 'projectType' },
{ text: 'Updated', value: 'updatedAt' },
- { text: 'Tags', value: 'tags'}
+ { text: 'Tags', value: 'tags' }
]
}
},
diff --git a/frontend/components/tasks/audio/AudioViewer.vue b/frontend/components/tasks/audio/AudioViewer.vue
index 240d8b47d8..2920679d55 100644
--- a/frontend/components/tasks/audio/AudioViewer.vue
+++ b/frontend/components/tasks/audio/AudioViewer.vue
@@ -1,11 +1,7 @@
<template>
<div>
<div id="waveform" />
- <v-row
- no-gutters
- align="center"
- class="mb-3 mt-1"
- >
+ <v-row no-gutters align="center" class="mb-3 mt-1">
<v-col md="8">
<v-slider
v-model="zoom"
@@ -43,21 +39,11 @@
/>
</v-col>
</v-row>
- <v-btn
- color="primary"
- class="text-capitalize"
- @click="play"
- >
- <v-icon
- v-if="!isPlaying"
- left
- >
+ <v-btn color="primary" class="text-capitalize" @click="play">
+ <v-icon v-if="!isPlaying" left>
{{ mdiPlayCircleOutline }}
</v-icon>
- <v-icon
- v-else
- left
- >
+ <v-icon v-else left>
{{ mdiPauseCircleOutline }}
</v-icon>
<span v-if="!isPlaying">Play</span>
@@ -69,7 +55,13 @@
<script>
import Vue from 'vue'
import WaveSurfer from 'wavesurfer.js'
-import { mdiPlayCircleOutline, mdiPauseCircleOutline, mdiVolumeHigh, mdiMagnifyPlusOutline, mdiMagnifyMinusOutline } from '@mdi/js'
+import {
+ mdiPlayCircleOutline,
+ mdiPauseCircleOutline,
+ mdiVolumeHigh,
+ mdiMagnifyPlusOutline,
+ mdiMagnifyMinusOutline
+} from '@mdi/js'
export default Vue.extend({
props: {
@@ -105,26 +97,26 @@ export default Vue.extend({
mounted() {
this.wavesurfer = WaveSurfer.create({
- container: '#waveform',
- backend: "MediaElement"
+ container: '#waveform',
+ backend: 'MediaElement'
})
this.load()
},
methods: {
load() {
- this.wavesurfer.load(this.source)
+ this.wavesurfer.load(this.source)
},
play() {
this.isPlaying = !this.isPlaying
- this.wavesurfer.playPause()
+ this.wavesurfer.playPause()
},
zoomOut() {
- this.zoom = (this.zoom - 10) || 0
+ this.zoom = this.zoom - 10 || 0
this.onChangeZoom(this.zoom)
},
zoomIn() {
- this.zoom = (this.zoom + 10) || 500
+ this.zoom = this.zoom + 10 || 500
this.onChangeZoom(this.zoom)
},
onChangeVolume(value) {
diff --git a/frontend/components/tasks/seq2seq/Seq2seqBox.vue b/frontend/components/tasks/seq2seq/Seq2seqBox.vue
index 7c3c2dd84e..5b374bc526 100644
--- a/frontend/components/tasks/seq2seq/Seq2seqBox.vue
+++ b/frontend/components/tasks/seq2seq/Seq2seqBox.vue
@@ -26,7 +26,7 @@
</template>
<template #[`item.text`]="{ item }">
<v-edit-dialog>
- <span class="title" style="font-weight:400">
+ <span class="title" style="font-weight: 400">
{{ item.text }}
</span>
<template #input>
@@ -40,10 +40,7 @@
</v-edit-dialog>
</template>
<template #[`item.action`]="{ item }">
- <v-icon
- small
- @click="remove(item.id)"
- >
+ <v-icon small @click="remove(item.id)">
{{ mdiDeleteOutline }}
</v-icon>
</template>
@@ -59,7 +56,7 @@ export default Vue.extend({
props: {
annotations: {
type: Array,
- default: () => ([]),
+ default: () => [],
required: true
}
},
diff --git a/frontend/components/tasks/sequenceLabeling/EntityEditor.vue b/frontend/components/tasks/sequenceLabeling/EntityEditor.vue
index 092d45d574..0b709b20ec 100644
--- a/frontend/components/tasks/sequenceLabeling/EntityEditor.vue
+++ b/frontend/components/tasks/sequenceLabeling/EntityEditor.vue
@@ -48,59 +48,59 @@ import 'vue-virtual-scroller/dist/vue-virtual-scroller.css'
export default Vue.extend({
components: {
VAnnotator,
- LabelingMenu,
+ LabelingMenu
},
props: {
dark: {
type: Boolean,
- default: false,
+ default: false
},
rtl: {
type: Boolean,
- default: false,
+ default: false
},
text: {
type: String,
- default: "",
- required: true,
+ default: '',
+ required: true
},
entities: {
type: Array as PropType<SpanDTO[]>,
default: () => [],
- required: true,
+ required: true
},
entityLabels: {
type: Array,
default: () => [],
- required: true,
+ required: true
},
relations: {
type: Array,
- default: () => [],
+ default: () => []
},
relationLabels: {
type: Array,
- default: () => [],
+ default: () => []
},
allowOverlapping: {
type: Boolean,
default: false,
- required: false,
+ required: false
},
graphemeMode: {
type: Boolean,
- default: false,
+ default: false
},
selectedLabel: {
type: Object,
default: null,
- required: false,
+ required: false
},
relationMode: {
type: Boolean,
- default: false,
- },
+ default: false
+ }
},
data() {
@@ -113,8 +113,8 @@ export default Vue.extend({
endOffset: 0,
entity: null as any,
relation: null as any,
- selectedEntities: [] as SpanDTO[],
- };
+ selectedEntities: [] as SpanDTO[]
+ }
},
computed: {
@@ -267,12 +267,12 @@ export default Vue.extend({
},
updateRelation(labelId: number) {
- this.$emit("click:relation", this.relation.id, labelId)
+ this.$emit('click:relation', this.relation.id, labelId)
},
deleteRelation(relation: any) {
this.$emit('contextmenu:relation', relation.id)
}
- },
-});
+ }
+})
</script>
diff --git a/frontend/components/tasks/sequenceLabeling/LabelingMenu.vue b/frontend/components/tasks/sequenceLabeling/LabelingMenu.vue
index 850cfef21f..7aba88c3b8 100644
--- a/frontend/components/tasks/sequenceLabeling/LabelingMenu.vue
+++ b/frontend/components/tasks/sequenceLabeling/LabelingMenu.vue
@@ -1,18 +1,6 @@
<template>
- <v-menu
- :value="opened"
- :position-x="x"
- :position-y="y"
- absolute
- offset-y
- @input="close"
- >
- <v-list
- dense
- min-width="150"
- max-height="400"
- class="overflow-y-auto"
- >
+ <v-menu :value="opened" :position-x="x" :position-y="y" absolute offset-y @input="close">
+ <v-list dense min-width="150" max-height="400" class="overflow-y-auto">
<v-list-item>
<v-autocomplete
ref="autocomplete"
@@ -28,14 +16,8 @@
small-chips
/>
</v-list-item>
- <v-list-item
- v-for="(label, i) in labels"
- :key="i"
- @click="onLabelSelected(label.id)"
- >
- <v-list-item-action
- v-if="hasAnySuffixKey"
- >
+ <v-list-item v-for="(label, i) in labels" :key="i" @click="onLabelSelected(label.id)">
+ <v-list-item-action v-if="hasAnySuffixKey">
<v-chip
v-if="label.suffixKey"
:color="label.backgroundColor"
@@ -46,7 +28,7 @@
<span v-else class="mr-8" />
</v-list-item-action>
<v-list-item-content>
- <v-list-item-title v-text="label.text"/>
+ <v-list-item-title v-text="label.text" />
</v-list-item-content>
</v-list-item>
</v-list>
@@ -60,27 +42,27 @@ export default Vue.extend({
labels: {
type: Array,
default: () => [],
- required: true,
+ required: true
},
opened: {
type: Boolean,
default: false,
- required: true,
+ required: true
},
selectedLabel: {
type: Object,
default: null,
- required: false,
+ required: false
},
x: {
type: Number,
default: 0,
- required: true,
+ required: true
},
y: {
type: Number,
default: 0,
- required: true,
+ required: true
}
},
@@ -90,8 +72,8 @@ export default Vue.extend({
endOffset: 0,
entity: null as any,
fromEntity: null as any,
- toEntity: null as any,
- };
+ toEntity: null as any
+ }
},
computed: {
@@ -115,7 +97,7 @@ export default Vue.extend({
// https://github.com/vuetifyjs/vuetify/issues/10765
this.$nextTick(() => {
if (this.$refs.autocomplete) {
- (this.$refs.autocomplete as any).selectedItems = []
+ ;(this.$refs.autocomplete as any).selectedItems = []
}
})
this.$emit('close')
diff --git a/frontend/components/tasks/sidebar/AnnotationProgress.vue b/frontend/components/tasks/sidebar/AnnotationProgress.vue
index 6fb8f848d1..11bd4b7df2 100644
--- a/frontend/components/tasks/sidebar/AnnotationProgress.vue
+++ b/frontend/components/tasks/sidebar/AnnotationProgress.vue
@@ -12,11 +12,7 @@
<v-list-item-subtitle class="text-right" v-text="progress.complete" />
</v-list-item>
</v-list>
- <v-progress-linear
- :value="percentage"
- color="success"
- height="25"
- >
+ <v-progress-linear :value="percentage" color="success" height="25">
<template #default="{ value }">
<strong>{{ value }}%</strong>
</template>
@@ -34,12 +30,12 @@ export default Vue.extend({
progress: {
type: Object as PropType<MyProgress>,
required: true
- },
+ }
},
computed: {
percentage(): number {
- return Math.ceil(this.progress.complete / this.progress.total * 100)
+ return Math.ceil((this.progress.complete / this.progress.total) * 100)
}
}
})
diff --git a/frontend/components/tasks/textClassification/LabelGroup.vue b/frontend/components/tasks/textClassification/LabelGroup.vue
index cc6dbbee0f..b7a504bfc1 100644
--- a/frontend/components/tasks/textClassification/LabelGroup.vue
+++ b/frontend/components/tasks/textClassification/LabelGroup.vue
@@ -33,7 +33,7 @@ export default {
},
annotations: {
type: Array,
- default: () => ([]),
+ default: () => [],
required: true
},
singleLabel: {
diff --git a/frontend/components/tasks/textClassification/LabelSelect.vue b/frontend/components/tasks/textClassification/LabelSelect.vue
index 0a70cda113..cb7620fcbd 100644
--- a/frontend/components/tasks/textClassification/LabelSelect.vue
+++ b/frontend/components/tasks/textClassification/LabelSelect.vue
@@ -33,7 +33,7 @@ export default {
},
annotations: {
type: Array,
- default: () => ([]),
+ default: () => [],
required: true
},
singleLabel: {
diff --git a/frontend/components/tasks/textClassification/multiLabel/LabelGroup.vue b/frontend/components/tasks/textClassification/multiLabel/LabelGroup.vue
index a28f118e6a..b3e637a443 100644
--- a/frontend/components/tasks/textClassification/multiLabel/LabelGroup.vue
+++ b/frontend/components/tasks/textClassification/multiLabel/LabelGroup.vue
@@ -1,10 +1,5 @@
<template>
- <v-chip-group
- :value="annotatedLabel"
- column
- multiple
- @change="addOrRemove"
- >
+ <v-chip-group :value="annotatedLabel" column multiple @change="addOrRemove">
<v-chip
v-for="item in labels"
:key="item.id"
@@ -13,12 +8,7 @@
:text-color="$contrastColor(item.backgroundColor)"
>
{{ item.text }}
- <v-avatar
- v-if="item.suffixKey"
- right
- color="white"
- class="black--text font-weight-bold"
- >
+ <v-avatar v-if="item.suffixKey" right color="white" class="black--text font-weight-bold">
{{ item.suffixKey }}
</v-avatar>
</v-chip>
@@ -37,15 +27,15 @@ export default {
},
annotations: {
type: Array,
- default: () => ([]),
+ default: () => [],
required: true
}
},
computed: {
annotatedLabel() {
- const labelIds = this.annotations.map(item => item.label)
- return labelIds.map(id => this.labels.findIndex(item => item.id === id))
+ const labelIds = this.annotations.map((item) => item.label)
+ return labelIds.map((id) => this.labels.findIndex((item) => item.id === id))
}
},
@@ -67,7 +57,7 @@ export default {
},
remove(label) {
- const annotation = this.annotations.find(item => item.label === label.id)
+ const annotation = this.annotations.find((item) => item.label === label.id)
this.$emit('remove', annotation.id)
}
}
diff --git a/frontend/components/tasks/textClassification/multiLabel/LabelSelect.vue b/frontend/components/tasks/textClassification/multiLabel/LabelSelect.vue
index 55a08e6eea..6842174b8a 100644
--- a/frontend/components/tasks/textClassification/multiLabel/LabelSelect.vue
+++ b/frontend/components/tasks/textClassification/multiLabel/LabelSelect.vue
@@ -9,7 +9,7 @@
multiple
class="pt-0"
:search-input.sync="search"
- @change="search=''"
+ @change="search = ''"
>
<template #selection="{ attrs, item, select, selected }">
<v-chip
@@ -21,28 +21,15 @@
@click="select"
@click:close="remove(item)"
>
- <v-avatar
- v-if="item.suffixKey"
- left
- color="white"
- class="black--text font-weight-bold"
- >
+ <v-avatar v-if="item.suffixKey" left color="white" class="black--text font-weight-bold">
{{ item.suffixKey }}
</v-avatar>
{{ item.text }}
</v-chip>
</template>
<template #item="{ item }">
- <v-chip
- :color="item.backgroundColor"
- :text-color="$contrastColor(item.backgroundColor)"
- >
- <v-avatar
- v-if="item.suffixKey"
- left
- color="white"
- class="black--text font-weight-bold"
- >
+ <v-chip :color="item.backgroundColor" :text-color="$contrastColor(item.backgroundColor)">
+ <v-avatar v-if="item.suffixKey" left color="white" class="black--text font-weight-bold">
{{ item.suffixKey }}
</v-avatar>
{{ item.text }}
@@ -61,7 +48,7 @@ export default {
},
annotations: {
type: Array,
- default: () => ([]),
+ default: () => [],
required: true
}
},
@@ -75,8 +62,8 @@ export default {
computed: {
annotatedLabels: {
get() {
- const labelIds = this.annotations.map(item => item.label)
- return this.labels.filter(item => labelIds.includes(item.id))
+ const labelIds = this.annotations.map((item) => item.label)
+ return this.labels.filter((item) => labelIds.includes(item.id))
},
set(newValue) {
if (newValue.length > this.annotations.length) {
@@ -87,7 +74,7 @@ export default {
newValue.pop()
}
} else {
- const label = this.annotatedLabels.find(x => !newValue.some(y => y.id === x.id))
+ const label = this.annotatedLabels.find((x) => !newValue.some((y) => y.id === x.id))
if (typeof label === 'object') {
this.remove(label)
}
@@ -102,7 +89,7 @@ export default {
},
remove(label) {
- const annotation = this.annotations.find(item => item.label === label.id)
+ const annotation = this.annotations.find((item) => item.label === label.id)
this.$emit('remove', annotation.id)
}
}
diff --git a/frontend/components/tasks/textClassification/singleLabel/LabelGroup.vue b/frontend/components/tasks/textClassification/singleLabel/LabelGroup.vue
index dbe45d688f..74a30acf4b 100644
--- a/frontend/components/tasks/textClassification/singleLabel/LabelGroup.vue
+++ b/frontend/components/tasks/textClassification/singleLabel/LabelGroup.vue
@@ -1,9 +1,5 @@
<template>
- <v-chip-group
- :value="annotatedLabel"
- column
- @change="addOrRemove"
- >
+ <v-chip-group :value="annotatedLabel" column @change="addOrRemove">
<v-chip
v-for="item in labels"
:key="item.id"
@@ -12,12 +8,7 @@
:text-color="$contrastColor(item.backgroundColor)"
>
{{ item.text }}
- <v-avatar
- v-if="item.suffixKey"
- right
- color="white"
- class="black--text font-weight-bold"
- >
+ <v-avatar v-if="item.suffixKey" right color="white" class="black--text font-weight-bold">
{{ item.suffixKey }}
</v-avatar>
</v-chip>
@@ -34,15 +25,15 @@ export default {
},
annotations: {
type: Array,
- default: () => ([]),
+ default: () => [],
required: true
}
},
computed: {
annotatedLabel() {
- const labelIds = this.annotations.map(item => item.label)
- return this.labels.findIndex(item => labelIds.includes(item.id))
+ const labelIds = this.annotations.map((item) => item.label)
+ return this.labels.findIndex((item) => labelIds.includes(item.id))
}
},
@@ -62,7 +53,7 @@ export default {
},
remove(label) {
- const annotation = this.annotations.find(item => item.label === label.id)
+ const annotation = this.annotations.find((item) => item.label === label.id)
this.$emit('remove', annotation.id)
}
}
diff --git a/frontend/components/tasks/textClassification/singleLabel/LabelSelect.vue b/frontend/components/tasks/textClassification/singleLabel/LabelSelect.vue
index 83ee95167d..506794786a 100644
--- a/frontend/components/tasks/textClassification/singleLabel/LabelSelect.vue
+++ b/frontend/components/tasks/textClassification/singleLabel/LabelSelect.vue
@@ -21,28 +21,15 @@
@click="select"
@click:close="remove(item)"
>
- <v-avatar
- v-if="item.suffixKey"
- left
- color="white"
- class="black--text font-weight-bold"
- >
+ <v-avatar v-if="item.suffixKey" left color="white" class="black--text font-weight-bold">
{{ item.suffixKey }}
</v-avatar>
{{ item.text }}
</v-chip>
</template>
<template #item="{ item }">
- <v-chip
- :color="item.backgroundColor"
- :text-color="$contrastColor(item.backgroundColor)"
- >
- <v-avatar
- v-if="item.suffixKey"
- left
- color="white"
- class="black--text font-weight-bold"
- >
+ <v-chip :color="item.backgroundColor" :text-color="$contrastColor(item.backgroundColor)">
+ <v-avatar v-if="item.suffixKey" left color="white" class="black--text font-weight-bold">
{{ item.suffixKey }}
</v-avatar>
{{ item.text }}
@@ -61,15 +48,15 @@ export default {
},
annotations: {
type: Array,
- default: () => ([]),
+ default: () => [],
required: true
}
},
computed: {
annotatedLabel() {
- const labelIds = this.annotations.map(item => item.label)
- return this.labels.find(item => labelIds.includes(item.id))
+ const labelIds = this.annotations.map((item) => item.label)
+ return this.labels.find((item) => labelIds.includes(item.id))
}
},
@@ -87,7 +74,7 @@ export default {
},
remove(label) {
- const annotation = this.annotations.find(item => item.label === label.id)
+ const annotation = this.annotations.find((item) => item.label === label.id)
this.$emit('remove', annotation.id)
}
}
diff --git a/frontend/components/tasks/toolbar/ToolbarLaptop.vue b/frontend/components/tasks/toolbar/ToolbarLaptop.vue
index cdc8a42de9..ebc2c42dfb 100644
--- a/frontend/components/tasks/toolbar/ToolbarLaptop.vue
+++ b/frontend/components/tasks/toolbar/ToolbarLaptop.vue
@@ -1,60 +1,39 @@
<template>
- <v-toolbar
- class="toolbar-control"
- dense
- flat
- >
+ <v-toolbar class="toolbar-control" dense flat>
<v-row no-gutters>
<v-btn-toggle>
- <button-review
- :is-reviewd="isReviewd"
- @click:review="$emit('click:review')"
- />
+ <button-review :is-reviewd="isReviewd" @click:review="$emit('click:review')" />
- <button-filter
- :value="filterOption"
- @click:filter="changeFilter"
- />
+ <button-filter :value="filterOption" @click:filter="changeFilter" />
- <button-guideline
- @click:guideline="dialogGuideline=true"
- />
+ <button-guideline @click:guideline="dialogGuideline = true" />
<v-dialog v-model="dialogGuideline">
- <form-guideline
- :guideline-text="guidelineText"
- @click:close="dialogGuideline=false"
- />
+ <form-guideline :guideline-text="guidelineText" @click:close="dialogGuideline = false" />
</v-dialog>
- <button-comment
- @click:comment="dialogComment=true"
- />
+ <button-comment @click:comment="dialogComment = true" />
<v-dialog v-model="dialogComment">
- <form-comment
- :example-id="docId"
- @click:cancel="dialogComment=false"
- />
+ <form-comment :example-id="docId" @click:cancel="dialogComment = false" />
</v-dialog>
- <button-auto-labeling
- @click:auto="dialogAutoLabeling=true"
- />
+ <button-auto-labeling @click:auto="dialogAutoLabeling = true" />
<v-dialog v-model="dialogAutoLabeling">
<form-auto-labeling
:is-enabled="enableAutoLabeling"
:error-message="errorMessage"
- @click:cancel="dialogAutoLabeling=false"
+ @click:cancel="dialogAutoLabeling = false"
@input="updateAutoLabeling"
/>
</v-dialog>
- <button-clear
- @click:clear="dialogClear=true"
- />
+ <button-clear @click:clear="dialogClear = true" />
<v-dialog v-model="dialogClear">
<form-clear-label
- @click:ok="$emit('click:clear-label');dialogClear=false"
- @click:cancel="dialogClear=false"
+ @click:ok="
+ $emit('click:clear-label')
+ dialogClear = false
+ "
+ @click:cancel="dialogClear = false"
/>
</v-dialog>
</v-btn-toggle>
@@ -150,21 +129,25 @@ export default Vue.extend({
methods: {
updatePage(page: number) {
- this.$router.push({ query: {
- page: page.toString(),
- isChecked: this.filterOption,
- q: this.$route.query.q
- }})
+ this.$router.push({
+ query: {
+ page: page.toString(),
+ isChecked: this.filterOption,
+ q: this.$route.query.q
+ }
+ })
},
changeFilter(isChecked: string) {
- this.$router.push({ query: {
- page: '1',
- isChecked,
- q: this.$route.query.q
- }})
+ this.$router.push({
+ query: {
+ page: '1',
+ isChecked,
+ q: this.$route.query.q
+ }
+ })
},
-
+
updateAutoLabeling(isEnable: boolean) {
if (isEnable) {
this.$emit('update:enable-auto-labeling', true)
@@ -184,4 +167,4 @@ export default Vue.extend({
::v-deep .v-dialog {
width: 800px;
}
-</style>
\ No newline at end of file
+</style>
diff --git a/frontend/components/tasks/toolbar/ToolbarMobile.vue b/frontend/components/tasks/toolbar/ToolbarMobile.vue
index 734111d49f..6009c1a1ec 100644
--- a/frontend/components/tasks/toolbar/ToolbarMobile.vue
+++ b/frontend/components/tasks/toolbar/ToolbarMobile.vue
@@ -1,21 +1,11 @@
<template>
- <v-bottom-navigation
- app
- absolute
- hide-on-scroll
- >
- <v-btn
- :disabled="isFirstPage"
- @click="updatePage(page - 1)"
- >
+ <v-bottom-navigation app absolute hide-on-scroll>
+ <v-btn :disabled="isFirstPage" @click="updatePage(page - 1)">
<span>Prev</span>
<v-icon>{{ mdiChevronLeft }}</v-icon>
</v-btn>
- <v-btn
- :disabled="isLastPage"
- @click="updatePage(page + 1)"
- >
+ <v-btn :disabled="isLastPage" @click="updatePage(page + 1)">
<span>Next</span>
<v-icon>{{ mdiChevronRight }}</v-icon>
</v-btn>
@@ -57,7 +47,7 @@ export default Vue.extend({
methods: {
updatePage(page: number) {
- this.$router.push({ query: { page: page.toString() }})
+ this.$router.push({ query: { page: page.toString() } })
}
}
})
diff --git a/frontend/components/tasks/toolbar/buttons/ButtonAutoLabeling.vue b/frontend/components/tasks/toolbar/buttons/ButtonAutoLabeling.vue
index 2052c6a33d..d68431431b 100644
--- a/frontend/components/tasks/toolbar/buttons/ButtonAutoLabeling.vue
+++ b/frontend/components/tasks/toolbar/buttons/ButtonAutoLabeling.vue
@@ -1,11 +1,7 @@
<template>
<v-tooltip bottom>
<template #activator="{ on }">
- <v-btn
- icon
- v-on="on"
- @click="$emit('click:auto')"
- >
+ <v-btn icon v-on="on" @click="$emit('click:auto')">
<v-icon>
{{ mdiAutoFix }}
</v-icon>
diff --git a/frontend/components/tasks/toolbar/buttons/ButtonClear.vue b/frontend/components/tasks/toolbar/buttons/ButtonClear.vue
index 08cb8dcf39..d963b7be53 100644
--- a/frontend/components/tasks/toolbar/buttons/ButtonClear.vue
+++ b/frontend/components/tasks/toolbar/buttons/ButtonClear.vue
@@ -1,11 +1,7 @@
<template>
<v-tooltip bottom>
<template #activator="{ on }">
- <v-btn
- icon
- v-on="on"
- @click="$emit('click:clear')"
- >
+ <v-btn icon v-on="on" @click="$emit('click:clear')">
<v-icon>
{{ mdiDeleteOutline }}
</v-icon>
@@ -23,6 +19,6 @@ export default {
return {
mdiDeleteOutline
}
- },
+ }
}
</script>
diff --git a/frontend/components/tasks/toolbar/buttons/ButtonComment.vue b/frontend/components/tasks/toolbar/buttons/ButtonComment.vue
index 1521a3361c..9f3d5737a0 100644
--- a/frontend/components/tasks/toolbar/buttons/ButtonComment.vue
+++ b/frontend/components/tasks/toolbar/buttons/ButtonComment.vue
@@ -1,11 +1,7 @@
<template>
<v-tooltip bottom>
<template #activator="{ on }">
- <v-btn
- icon
- v-on="on"
- @click="$emit('click:comment')"
- >
+ <v-btn icon v-on="on" @click="$emit('click:comment')">
<v-icon>
{{ mdiMessageText }}
</v-icon>
diff --git a/frontend/components/tasks/toolbar/buttons/ButtonFilter.vue b/frontend/components/tasks/toolbar/buttons/ButtonFilter.vue
index 37dc84128d..05ff609992 100644
--- a/frontend/components/tasks/toolbar/buttons/ButtonFilter.vue
+++ b/frontend/components/tasks/toolbar/buttons/ButtonFilter.vue
@@ -3,10 +3,7 @@
<template #activator="{ on: menu }">
<v-tooltip bottom>
<template #activator="{ on: tooltip }">
- <v-btn
- icon
- v-on="{ ...tooltip, ...menu }"
- >
+ <v-btn icon v-on="{ ...tooltip, ...menu }">
<v-icon>
{{ mdiFilter }}
</v-icon>
@@ -17,10 +14,7 @@
</template>
<v-list>
<v-list-item-group v-model="selected" mandatory>
- <v-list-item
- v-for="(item, i) in items"
- :key="i"
- >
+ <v-list-item v-for="(item, i) in items" :key="i">
<v-list-item-icon>
<v-icon v-if="selected === i">
{{ mdiCheck }}
@@ -64,7 +58,7 @@ export default {
computed: {
selected: {
get() {
- const index = this.items.findIndex(item => item.param === this.value)
+ const index = this.items.findIndex((item) => item.param === this.value)
return index === -1 ? 0 : index
},
set(value) {
diff --git a/frontend/components/tasks/toolbar/buttons/ButtonGuideline.vue b/frontend/components/tasks/toolbar/buttons/ButtonGuideline.vue
index 6cdccf91da..03cf207461 100644
--- a/frontend/components/tasks/toolbar/buttons/ButtonGuideline.vue
+++ b/frontend/components/tasks/toolbar/buttons/ButtonGuideline.vue
@@ -1,11 +1,7 @@
<template>
<v-tooltip bottom>
<template #activator="{ on }">
- <v-btn
- icon
- v-on="on"
- @click="$emit('click:guideline')"
- >
+ <v-btn icon v-on="on" @click="$emit('click:guideline')">
<v-icon>
{{ mdiBookOpenOutline }}
</v-icon>
@@ -23,6 +19,6 @@ export default {
return {
mdiBookOpenOutline
}
- },
+ }
}
</script>
diff --git a/frontend/components/tasks/toolbar/buttons/ButtonLabelSwitch.vue b/frontend/components/tasks/toolbar/buttons/ButtonLabelSwitch.vue
index d2a62bf315..9622242ea6 100644
--- a/frontend/components/tasks/toolbar/buttons/ButtonLabelSwitch.vue
+++ b/frontend/components/tasks/toolbar/buttons/ButtonLabelSwitch.vue
@@ -1,8 +1,5 @@
<template>
- <v-btn-toggle
- v-model="option"
- mandatory
- >
+ <v-btn-toggle v-model="option" mandatory>
<v-btn icon>
<v-icon>{{ mdiFormatListBulleted }}</v-icon>
</v-btn>
diff --git a/frontend/components/tasks/toolbar/buttons/ButtonPagination.vue b/frontend/components/tasks/toolbar/buttons/ButtonPagination.vue
index 65c3996bbe..11caf161bc 100644
--- a/frontend/components/tasks/toolbar/buttons/ButtonPagination.vue
+++ b/frontend/components/tasks/toolbar/buttons/ButtonPagination.vue
@@ -1,15 +1,9 @@
<template>
<div class="v-data-footer">
- <v-edit-dialog
- large
- persistent
- @save="changePageNumber"
- >
+ <v-edit-dialog large persistent @save="changePageNumber">
<span>{{ value }} of {{ total }}</span>
<template #input>
- <div class="mt-4 title">
- Move Page
- </div>
+ <div class="mt-4 title">Move Page</div>
<v-text-field
v-model="editedPage"
:rules="rules"
@@ -89,7 +83,8 @@ export default Vue.extend({
return {
editedPage: '1',
rules: [
- (v: string) => (v && parseInt(v, 10) > 0 && parseInt(v, 10) <= this.total) || 'Invalid page number!'
+ (v: string) =>
+ (v && parseInt(v, 10) > 0 && parseInt(v, 10) <= this.total) || 'Invalid page number!'
],
mdiPageFirst,
mdiPageLast,
diff --git a/frontend/components/tasks/toolbar/forms/FormAutoLabeling.vue b/frontend/components/tasks/toolbar/forms/FormAutoLabeling.vue
index 07654aa143..e0393813f1 100644
--- a/frontend/components/tasks/toolbar/forms/FormAutoLabeling.vue
+++ b/frontend/components/tasks/toolbar/forms/FormAutoLabeling.vue
@@ -1,19 +1,16 @@
<template>
- <base-card
- title="Settings"
- :cancel-text="$t('generic.close')"
- @cancel="$emit('click:cancel')"
- >
+ <base-card title="Settings" :cancel-text="$t('generic.close')" @cancel="$emit('click:cancel')">
<template #content>
<h3>Auto Labeling</h3>
<p>
- The auto labeling allows users to annotate data automatically.
- It enables them to speed up annotating data.
- You only have to correct labels which are mislabeled by the system and annotate labels which aren’t labeled by it.
+ The auto labeling allows users to annotate data automatically. It enables them to speed up
+ annotating data. You only have to correct labels which are mislabeled by the system and
+ annotate labels which aren’t labeled by it.
</p>
<p>
- Notice that you can't use this feature unless the project administrators configure the auto labeling.
- Also, depending on the configuration, it will take some cost for the administrators(e.g. In the case of configuring some paid service like AWS or GCP).
+ Notice that you can't use this feature unless the project administrators configure the auto
+ labeling. Also, depending on the configuration, it will take some cost for the
+ administrators(e.g. In the case of configuring some paid service like AWS or GCP).
</p>
<v-switch
:value="isEnabled"
diff --git a/frontend/components/tasks/toolbar/forms/FormComment.vue b/frontend/components/tasks/toolbar/forms/FormComment.vue
index 3abb79e57e..dfd3af6df7 100644
--- a/frontend/components/tasks/toolbar/forms/FormComment.vue
+++ b/frontend/components/tasks/toolbar/forms/FormComment.vue
@@ -5,9 +5,7 @@
@cancel="$emit('click:cancel')"
>
<template v-if="user.id" #content>
- <form-create
- @add-comment="add"
- />
+ <form-create @add-comment="add" />
<comment
v-for="comment in comments"
:key="comment.id"
@@ -44,7 +42,7 @@ export default Vue.extend({
data() {
return {
user: {},
- comments: [] as CommentReadDTO[],
+ comments: [] as CommentReadDTO[]
}
},
diff --git a/frontend/components/tasks/toolbar/forms/FormGuideline.vue b/frontend/components/tasks/toolbar/forms/FormGuideline.vue
index 175883fe5d..d01c1ab7df 100644
--- a/frontend/components/tasks/toolbar/forms/FormGuideline.vue
+++ b/frontend/components/tasks/toolbar/forms/FormGuideline.vue
@@ -5,9 +5,7 @@
@cancel="close"
>
<template #content>
- <viewer
- :initial-value="guidelineText"
- />
+ <viewer :initial-value="guidelineText" />
</template>
</base-card>
</template>
diff --git a/frontend/components/utils/ActionMenu.vue b/frontend/components/utils/ActionMenu.vue
index f94e1f26c5..454f79baa6 100644
--- a/frontend/components/utils/ActionMenu.vue
+++ b/frontend/components/utils/ActionMenu.vue
@@ -1,23 +1,13 @@
<template>
- <v-menu
- offset-y
- open-on-hover
- >
+ <v-menu offset-y open-on-hover>
<template #activator="{ on }">
- <v-btn
- color="primary text-capitalize"
- v-on="on"
- >
+ <v-btn color="primary text-capitalize" v-on="on">
{{ text }}
<v-icon>{{ mdiMenuDown }}</v-icon>
</v-btn>
</template>
<v-list>
- <v-list-item
- v-for="(item, index) in items"
- :key="index"
- @click="$emit(item.event)"
- >
+ <v-list-item v-for="(item, index) in items" :key="index" @click="$emit(item.event)">
<v-list-item-icon>
<v-icon>{{ item.icon }}</v-icon>
</v-list-item-icon>
@@ -50,6 +40,6 @@ export default Vue.extend({
return {
mdiMenuDown
}
- },
+ }
})
</script>
diff --git a/frontend/components/utils/BaseCard.vue b/frontend/components/utils/BaseCard.vue
index d928fe3e72..20600750b6 100644
--- a/frontend/components/utils/BaseCard.vue
+++ b/frontend/components/utils/BaseCard.vue
@@ -1,9 +1,6 @@
<template>
<v-card>
- <v-toolbar
- color="primary white--text"
- flat
- >
+ <v-toolbar color="primary white--text" flat>
<v-toolbar-title>{{ title }}</v-toolbar-title>
</v-toolbar>
<v-card-text class="text--primary mt-3 pl-4">
diff --git a/frontend/composables/useExampleItem.ts b/frontend/composables/useExampleItem.ts
index 718382b609..d616850ba3 100644
--- a/frontend/composables/useExampleItem.ts
+++ b/frontend/composables/useExampleItem.ts
@@ -12,9 +12,9 @@ export const useExampleItem = () => {
const { app } = useContext()
const exampleService = app.$services.example
- const getExample = async(
+ const getExample = async (
projectId: string,
- { page, q, isChecked }: { page: string, q: string, isChecked: string}
+ { page, q, isChecked }: { page: string; q: string; isChecked: string }
) => {
const examples = await exampleService.fetchOne(projectId, page, q, isChecked)
state.totalExample = examples.count
@@ -23,19 +23,15 @@ export const useExampleItem = () => {
}
}
- const getExampleById = async(
- projectId: string
- ) => {
+ const getExampleById = async (projectId: string) => {
state.example = await exampleService.findById(projectId, state.example.id)
}
- const updateProgress = async(projectId: string) => {
+ const updateProgress = async (projectId: string) => {
state.progress = await app.$services.metrics.fetchMyProgress(projectId)
}
- const confirm = async(
- projectId: string,
- ) => {
+ const confirm = async (projectId: string) => {
await exampleService.confirm(projectId, state.example.id)
await getExampleById(projectId)
updateProgress(projectId)
diff --git a/frontend/composables/useLabelList.ts b/frontend/composables/useLabelList.ts
index 1bc025b763..59bf4603cb 100644
--- a/frontend/composables/useLabelList.ts
+++ b/frontend/composables/useLabelList.ts
@@ -1,6 +1,6 @@
import { computed, reactive } from '@nuxtjs/composition-api'
import { LabelDTO } from '@/services/application/label/labelData'
-import { CreateLabelCommand , UpdateLabelCommand } from '@/services/application/label/labelCommand'
+import { CreateLabelCommand, UpdateLabelCommand } from '@/services/application/label/labelCommand'
import { LabelApplicationService } from '@/services/application/label/labelApplicationService'
export const useLabelList = (service: LabelApplicationService) => {
@@ -8,41 +8,30 @@ export const useLabelList = (service: LabelApplicationService) => {
labels: [] as LabelDTO[]
})
- const getLabelList = async(
- projectId: string
- ) => {
+ const getLabelList = async (projectId: string) => {
state.labels = await service.list(projectId)
}
- const createLabel = async(
- projectId: string,
- command: CreateLabelCommand
- ) => {
+ const createLabel = async (projectId: string, command: CreateLabelCommand) => {
await service.create(projectId, command)
await getLabelList(projectId)
}
- const updateLabel = async(
- projectId: string,
- command: UpdateLabelCommand
- ) => {
+ const updateLabel = async (projectId: string, command: UpdateLabelCommand) => {
await service.update(projectId, command)
}
- const deleteLabelList = async(
- projectId: string,
- items: LabelDTO[]
- ) => {
+ const deleteLabelList = async (projectId: string, items: LabelDTO[]) => {
await service.bulkDelete(projectId, items)
await getLabelList(projectId)
}
const findLabelById = (labelId: number) => {
- return state.labels.find(item => item.id === labelId)
+ return state.labels.find((item) => item.id === labelId)
}
const shortKeys = computed(() => {
- return Object.fromEntries(state.labels.map(item => [item.id, [item.suffixKey]]))
+ return Object.fromEntries(state.labels.map((item) => [item.id, [item.suffixKey]]))
})
return {
@@ -52,6 +41,6 @@ export const useLabelList = (service: LabelApplicationService) => {
createLabel,
updateLabel,
deleteLabelList,
- shortKeys,
+ shortKeys
}
}
diff --git a/frontend/composables/useProjectItem.ts b/frontend/composables/useProjectItem.ts
index 16065d0d8a..a086ce8f36 100644
--- a/frontend/composables/useProjectItem.ts
+++ b/frontend/composables/useProjectItem.ts
@@ -9,9 +9,7 @@ export const useProjectItem = () => {
const { app } = useContext()
const projectService = app.$services.project
- const getProjectById = async(
- projectId: string
- ) => {
+ const getProjectById = async (projectId: string) => {
state.project = await projectService.findById(projectId)
}
diff --git a/frontend/composables/useTeacherList.ts b/frontend/composables/useTeacherList.ts
index aae1b88bd5..bdc38522e2 100644
--- a/frontend/composables/useTeacherList.ts
+++ b/frontend/composables/useTeacherList.ts
@@ -5,57 +5,36 @@ export const useTeacherList = (service: any) => {
teacherList: []
})
- const getTeacherList = async(
- projectId: string,
- exampleId: number
- ) => {
+ const getTeacherList = async (projectId: string, exampleId: number) => {
state.teacherList = await service.list(projectId, exampleId)
}
- const removeTeacher = async(
- projectId: string,
- exampleId: number,
- teacherId: number
- ) => {
+ const removeTeacher = async (projectId: string, exampleId: number, teacherId: number) => {
await service.delete(projectId, exampleId, teacherId)
await getTeacherList(projectId, exampleId)
}
- const annotateLabel = async(
- projectId: string,
- exampleId: number,
- labelId: number
- ) => {
+ const annotateLabel = async (projectId: string, exampleId: number, labelId: number) => {
await service.create(projectId, exampleId, labelId)
await getTeacherList(projectId, exampleId)
}
- const clearTeacherList = async(
- projectId: string,
- exampleId: number
- ) => {
+ const clearTeacherList = async (projectId: string, exampleId: number) => {
await service.clear(projectId, exampleId)
await getTeacherList(projectId, exampleId)
}
- const autoLabel = async(
- projectId: string,
- exampleId: number
- ) => {
+ const autoLabel = async (projectId: string, exampleId: number) => {
await service.autoLabel(projectId, exampleId)
await getTeacherList(projectId, exampleId)
}
- const annotateOrRemoveLabel = async(
- projectId: string,
- exampleId: number,
- srcKey: string
- ) => {
+ const annotateOrRemoveLabel = async (projectId: string, exampleId: number, srcKey: string) => {
const labelId = parseInt(srcKey, 10)
// @ts-ignore
- const annotation = state.teacherList.find(item => item.label === labelId)
+ const annotation = state.teacherList.find((item) => item.label === labelId)
if (annotation) {
- // @ts-ignore
+ // @ts-ignore
await removeTeacher(projectId, exampleId, annotation.id)
} else {
await annotateLabel(projectId, exampleId, labelId)
@@ -69,6 +48,6 @@ export const useTeacherList = (service: any) => {
annotateOrRemoveLabel,
removeTeacher,
clearTeacherList,
- autoLabel,
+ autoLabel
}
-}
\ No newline at end of file
+}
diff --git a/frontend/domain/models/autoLabeling/config.ts b/frontend/domain/models/autoLabeling/config.ts
index 119c534414..89fd0d9f6b 100644
--- a/frontend/domain/models/autoLabeling/config.ts
+++ b/frontend/domain/models/autoLabeling/config.ts
@@ -6,27 +6,27 @@ export class ConfigItemList {
}
toArray(): Object[] {
- return this.configItems.map(item => item.toObject())
+ return this.configItems.map((item) => item.toObject())
}
}
interface LabelMappingForUI {
- from: string,
+ from: string
to: string
}
export interface ParametersForUI {
- name: string,
- value: string | object[],
- type?: string,
+ name: string
+ value: string | object[]
+ type?: string
items?: string[]
}
export interface Fields {
- modelName: string,
- modelAttrs: ParametersForUI[],
- template: string,
- labelMapping: LabelMappingForUI[],
+ modelName: string
+ modelAttrs: ParametersForUI[]
+ template: string
+ labelMapping: LabelMappingForUI[]
taskType: string
}
@@ -37,23 +37,42 @@ export class ConfigItem {
public modelAttrs: object,
public template: string,
public labelMapping: object,
- public taskType: string,
+ public taskType: string
) {}
- static valueOf(
- { id, model_name, model_attrs, template, label_mapping, task_type }:
- { id: number, model_name: string, model_attrs: object, template: string, label_mapping: object, task_type: string }
- ): ConfigItem {
+ static valueOf({
+ id,
+ model_name,
+ model_attrs,
+ template,
+ label_mapping,
+ task_type
+ }: {
+ id: number
+ model_name: string
+ model_attrs: object
+ template: string
+ label_mapping: object
+ task_type: string
+ }): ConfigItem {
return new ConfigItem(id, model_name, model_attrs, template, label_mapping, task_type)
}
- static parseFromUI(
- { modelName, modelAttrs, template, labelMapping, taskType }: Fields): ConfigItem {
- const mapping = labelMapping.reduce((a, x) => ({...a, [x.from]: x.to}), {})
- const attributes: {[key: string]: any} = modelAttrs.reduce((a, x) => ({...a, [x.name]: x.value}), {})
+ static parseFromUI({
+ modelName,
+ modelAttrs,
+ template,
+ labelMapping,
+ taskType
+ }: Fields): ConfigItem {
+ const mapping = labelMapping.reduce((a, x) => ({ ...a, [x.from]: x.to }), {})
+ const attributes: { [key: string]: any } = modelAttrs.reduce(
+ (a, x) => ({ ...a, [x.name]: x.value }),
+ {}
+ )
for (const [key, value] of Object.entries(attributes)) {
if (Array.isArray(value)) {
- attributes[key] = value.reduce((a, x) => ({...a, [x.key]: x.value}), {})
+ attributes[key] = value.reduce((a, x) => ({ ...a, [x.key]: x.value }), {})
}
}
return new ConfigItem(99999, modelName, attributes, template, mapping, taskType)
diff --git a/frontend/domain/models/autoLabeling/configRepository.ts b/frontend/domain/models/autoLabeling/configRepository.ts
index 205747a686..c3c5ab482a 100644
--- a/frontend/domain/models/autoLabeling/configRepository.ts
+++ b/frontend/domain/models/autoLabeling/configRepository.ts
@@ -1,7 +1,7 @@
import { ConfigItem, ConfigItemList } from '~/domain/models/autoLabeling/config'
export interface ConfigTestResponse {
- valid: boolean,
+ valid: boolean
labels: object[]
}
diff --git a/frontend/domain/models/autoLabeling/template.ts b/frontend/domain/models/autoLabeling/template.ts
index 7276e1e3eb..4d56bd5b8c 100644
--- a/frontend/domain/models/autoLabeling/template.ts
+++ b/frontend/domain/models/autoLabeling/template.ts
@@ -1,27 +1,21 @@
import { Fields, ParametersForUI } from '~/domain/models/autoLabeling/config'
export interface Schema {
- title: string,
- type: string,
+ title: string
+ type: string
properties: object
}
export interface ConfigResponse {
- name: string,
- schema: Schema,
+ name: string
+ schema: Schema
template: string
}
export class ConfigTemplateItem {
- constructor(
- private schema: Schema,
- public template: string
- ) {}
+ constructor(private schema: Schema, public template: string) {}
- static valueOf(
- { schema, template }:
- { schema: Schema, template: string }
- ): ConfigTemplateItem {
+ static valueOf({ schema, template }: { schema: Schema; template: string }): ConfigTemplateItem {
return new ConfigTemplateItem(schema, template)
}
@@ -33,26 +27,20 @@ export class ConfigTemplateItem {
const response: ParametersForUI[] = []
for (const [key, value] of Object.entries(this.schema.properties)) {
if ('type' in value && value.type === 'string') {
- response.push({name: key, type: 'textField', value: ''})
+ response.push({ name: key, type: 'textField', value: '' })
} else if ('anyOf' in value) {
- response.push(
- {
- name: key,
- type: 'selectField',
- value: '',
- items: value.anyOf.map(
- (item: {'const': string, 'type': string}) => item.const
- )
- }
- )
+ response.push({
+ name: key,
+ type: 'selectField',
+ value: '',
+ items: value.anyOf.map((item: { const: string; type: string }) => item.const)
+ })
} else if ('type' in value && value.type === 'object') {
- response.push(
- {
- name: key,
- type: 'objectField',
- value: []
- }
- )
+ response.push({
+ name: key,
+ type: 'objectField',
+ value: []
+ })
}
}
return response
diff --git a/frontend/domain/models/celery/status.ts b/frontend/domain/models/celery/status.ts
index 24fb766f8f..4f1cc36188 100644
--- a/frontend/domain/models/celery/status.ts
+++ b/frontend/domain/models/celery/status.ts
@@ -1,5 +1,5 @@
export class Status {
- ready: boolean;
- result: object;
- error: any;
+ ready: boolean
+ result: object
+ error: any
}
diff --git a/frontend/domain/models/celery/taskStatusRepository.ts b/frontend/domain/models/celery/taskStatusRepository.ts
index 413314e24f..24f3928856 100644
--- a/frontend/domain/models/celery/taskStatusRepository.ts
+++ b/frontend/domain/models/celery/taskStatusRepository.ts
@@ -1,6 +1,5 @@
import { Status } from './status'
-
export interface TaskStatusRepository {
get(taskId: string): Promise<Status>
}
diff --git a/frontend/domain/models/comment/comment.ts b/frontend/domain/models/comment/comment.ts
index 0dc161889c..48c99bf99e 100644
--- a/frontend/domain/models/comment/comment.ts
+++ b/frontend/domain/models/comment/comment.ts
@@ -1,15 +1,15 @@
-import "reflect-metadata"
+import 'reflect-metadata'
import { Expose, Type } from 'class-transformer'
export class CommentItem {
- id: number;
- user: number;
- username: string;
- example: number;
- text: string;
+ id: number
+ user: number
+ username: string
+ example: number
+ text: string
@Expose({ name: 'created_at' })
- createdAt: string;
+ createdAt: string
by(userId: number) {
return this.user === userId
@@ -28,11 +28,11 @@ export class CommentItem {
}
export class CommentItemList {
- count: number;
- next: string | null;
- prev: string | null;
+ count: number
+ next: string | null
+ prev: string | null
@Type(() => CommentItem)
@Expose({ name: 'results' })
- items: CommentItem[];
+ items: CommentItem[]
}
diff --git a/frontend/domain/models/comment/commentRepository.ts b/frontend/domain/models/comment/commentRepository.ts
index a5a02529a9..0ce92efb42 100644
--- a/frontend/domain/models/comment/commentRepository.ts
+++ b/frontend/domain/models/comment/commentRepository.ts
@@ -1,6 +1,6 @@
import { CommentItem, CommentItemList } from '~/domain/models/comment/comment'
-export type SearchOption = {[key: string]: string | (string | null)[]}
+export type SearchOption = { [key: string]: string | (string | null)[] }
export interface CommentRepository {
listAll(projectId: string, { limit, offset, q }: SearchOption): Promise<CommentItemList>
diff --git a/frontend/domain/models/download/downloadFormatRepository.ts b/frontend/domain/models/download/downloadFormatRepository.ts
index 5b70240fc3..e1d833c5f4 100644
--- a/frontend/domain/models/download/downloadFormatRepository.ts
+++ b/frontend/domain/models/download/downloadFormatRepository.ts
@@ -1,6 +1,5 @@
import { Format } from './format'
-
export interface DownloadFormatRepository {
list(projectId: string): Promise<Format[]>
}
diff --git a/frontend/domain/models/download/format.ts b/frontend/domain/models/download/format.ts
index 5f78d5e20c..514dd3063c 100644
--- a/frontend/domain/models/download/format.ts
+++ b/frontend/domain/models/download/format.ts
@@ -1,5 +1,5 @@
export class Format {
- name: string;
- example: string;
- properties: object;
+ name: string
+ example: string
+ properties: object
}
diff --git a/frontend/domain/models/example/example.ts b/frontend/domain/models/example/example.ts
index ae01de1b45..91f937c939 100644
--- a/frontend/domain/models/example/example.ts
+++ b/frontend/domain/models/example/example.ts
@@ -1,25 +1,25 @@
-import "reflect-metadata"
+import 'reflect-metadata'
import { Expose, Type } from 'class-transformer'
export class ExampleItem {
- id: number;
- text: string;
- meta: object;
+ id: number
+ text: string
+ meta: object
@Expose({ name: 'annotation_approver' })
- annotationApprover: boolean | null;
+ annotationApprover: boolean | null
@Expose({ name: 'comment_count' })
- commentCount: number;
+ commentCount: number
@Expose({ name: 'filename' })
- fileUrl: string;
+ fileUrl: string
@Expose({ name: 'is_confirmed' })
- isConfirmed: boolean;
+ isConfirmed: boolean
@Expose({ name: 'upload_name' })
- filename: string;
+ filename: string
get url() {
const l = this.fileUrl.indexOf('media/')
@@ -39,11 +39,11 @@ export class ExampleItem {
}
export class ExampleItemList {
- count: number;
- next: string | null;
- prev: string | null;
+ count: number
+ next: string | null
+ prev: string | null
@Type(() => ExampleItem)
@Expose({ name: 'results' })
- items: ExampleItem[];
+ items: ExampleItem[]
}
diff --git a/frontend/domain/models/example/exampleRepository.ts b/frontend/domain/models/example/exampleRepository.ts
index 9991558e2d..06e417897b 100644
--- a/frontend/domain/models/example/exampleRepository.ts
+++ b/frontend/domain/models/example/exampleRepository.ts
@@ -1,6 +1,6 @@
import { ExampleItem, ExampleItemList } from '~/domain/models/example/example'
-export type SearchOption = {[key: string]: string | (string | null)[]}
+export type SearchOption = { [key: string]: string | (string | null)[] }
export interface ExampleRepository {
list(projectId: string, { limit, offset, q, isChecked }: SearchOption): Promise<ExampleItemList>
diff --git a/frontend/domain/models/label/label.ts b/frontend/domain/models/label/label.ts
index f702684405..75aa766273 100644
--- a/frontend/domain/models/label/label.ts
+++ b/frontend/domain/models/label/label.ts
@@ -1,20 +1,20 @@
import { Expose } from 'class-transformer'
export class LabelItem {
- id: number;
- text: string;
+ id: number
+ text: string
@Expose({ name: 'prefix_key' })
- prefixKey: string | null;
+ prefixKey: string | null
@Expose({ name: 'suffix_key' })
- suffixKey: string | null;
+ suffixKey: string | null
@Expose({ name: 'background_color' })
- backgroundColor: string;
+ backgroundColor: string
@Expose({ name: 'text_color' })
- textColor: string = '#ffffff';
+ textColor: string = '#ffffff'
toObject() {
return {
diff --git a/frontend/domain/models/label/labelRepository.ts b/frontend/domain/models/label/labelRepository.ts
index a38178a969..90ace8c36a 100644
--- a/frontend/domain/models/label/labelRepository.ts
+++ b/frontend/domain/models/label/labelRepository.ts
@@ -11,5 +11,5 @@ export interface LabelRepository {
bulkDelete(projectId: string, labelIds: number[]): Promise<void>
- uploadFile(projectId: string, payload: FormData): Promise<void>
+ uploadFile(projectId: string, payload: FormData): Promise<void>
}
diff --git a/frontend/domain/models/member/member.ts b/frontend/domain/models/member/member.ts
index 99b7bb041c..d7c060f168 100644
--- a/frontend/domain/models/member/member.ts
+++ b/frontend/domain/models/member/member.ts
@@ -1,9 +1,9 @@
export class MemberItem {
- id: number;
- user: number;
- role: number;
- username: string;
- rolename: string;
+ id: number
+ user: number
+ role: number
+ username: string
+ rolename: string
get isProjectAdmin(): boolean {
return this.rolename === 'project_admin'
diff --git a/frontend/domain/models/metrics/metrics.ts b/frontend/domain/models/metrics/metrics.ts
index 0a4c27478f..aa3df6626a 100644
--- a/frontend/domain/models/metrics/metrics.ts
+++ b/frontend/domain/models/metrics/metrics.ts
@@ -1,10 +1,10 @@
-export type Label = {[key: string]: number}
-export type User = {[key: string]: number}
-export type ConfirmedCount = {[key: string]: number}
-export type Distribution = {[user: string]: {[label: string]: number}}
+export type Label = { [key: string]: number }
+export type User = { [key: string]: number }
+export type ConfirmedCount = { [key: string]: number }
+export type Distribution = { [user: string]: { [label: string]: number } }
export interface Progress {
total: number
- progress: {user: string, done: number}[]
+ progress: { user: string; done: number }[]
}
export interface MyProgress {
diff --git a/frontend/domain/models/option/option.ts b/frontend/domain/models/option/option.ts
index 0f453f72bf..8fc046e758 100644
--- a/frontend/domain/models/option/option.ts
+++ b/frontend/domain/models/option/option.ts
@@ -1,9 +1,7 @@
export class PageNumber {
num: number
- constructor(
- public page: number
- ) {
+ constructor(public page: number) {
if (typeof page === 'string' && /^\d+$/.test(page)) {
this.num = parseInt(page, 10)
}
@@ -15,16 +13,17 @@ export class PageNumber {
}
export class OptionItem {
- constructor(
- public page : number,
- public q? : string,
- public isChecked?: string
- ) {}
+ constructor(public page: number, public q?: string, public isChecked?: string) {}
- static valueOf(
- { page, q = '', isChecked = '' }:
- { page: number, q?: string, isChecked?: string }
- ): OptionItem {
+ static valueOf({
+ page,
+ q = '',
+ isChecked = ''
+ }: {
+ page: number
+ q?: string
+ isChecked?: string
+ }): OptionItem {
return new OptionItem(page, q, isChecked)
}
diff --git a/frontend/domain/models/project/project.ts b/frontend/domain/models/project/project.ts
index c564c90253..f47273dd17 100644
--- a/frontend/domain/models/project/project.ts
+++ b/frontend/domain/models/project/project.ts
@@ -1,67 +1,72 @@
-import "reflect-metadata"
+import 'reflect-metadata'
import { Expose, Type } from 'class-transformer'
-export type ProjectType = 'DocumentClassification' | 'SequenceLabeling' | 'Seq2seq' | 'IntentDetectionAndSlotFilling' | 'ImageClassification' | 'Speech2text'
-
+export type ProjectType =
+ | 'DocumentClassification'
+ | 'SequenceLabeling'
+ | 'Seq2seq'
+ | 'IntentDetectionAndSlotFilling'
+ | 'ImageClassification'
+ | 'Speech2text'
export class ProjectReadItem {
- id: number;
- name: string;
- description: string;
- guideline: string;
- users: number[];
- tags: Object[];
+ id: number
+ name: string
+ description: string
+ guideline: string
+ users: number[]
+ tags: Object[]
@Expose({ name: 'project_type' })
- projectType: ProjectType;
+ projectType: ProjectType
@Expose({ name: 'updated_at' })
- updatedAt: string;
+ updatedAt: string
@Expose({ name: 'random_order' })
- randomOrder: boolean;
+ randomOrder: boolean
@Expose({ name: 'collaborative_annotation' })
- collaborative_annotation: boolean;
+ collaborative_annotation: boolean
@Expose({ name: 'single_class_classification' })
- exclusiveCategories: boolean;
+ exclusiveCategories: boolean
@Expose({ name: 'resourcetype' })
- resourceType: string;
+ resourceType: string
@Expose({ name: 'allow_overlapping' })
- allowOverlapping: boolean;
+ allowOverlapping: boolean
@Expose({ name: 'grapheme_mode' })
- graphemeMode: boolean;
+ graphemeMode: boolean
@Expose({ name: 'use_relation' })
- useRelation: boolean;
+ useRelation: boolean
- @Expose({ name: 'is_text_project'})
- isTextProject: boolean;
+ @Expose({ name: 'is_text_project' })
+ isTextProject: boolean
@Expose({ name: 'can_define_label' })
- canDefineLabel: boolean;
+ canDefineLabel: boolean
@Expose({ name: 'can_define_relation' })
- canDefineRelation: boolean;
+ canDefineRelation: boolean
- @Expose({ name: 'can_define_span'})
- canDefineSpan: boolean;
+ @Expose({ name: 'can_define_span' })
+ canDefineSpan: boolean
@Expose({ name: 'can_define_category' })
- canDefineCategory: boolean;
+ canDefineCategory: boolean
get annotationPageLink(): string {
const mapping = {
DocumentClassification: 'text-classification',
- SequenceLabeling : 'sequence-labeling',
- Seq2seq : 'sequence-to-sequence',
+ SequenceLabeling: 'sequence-labeling',
+ Seq2seq: 'sequence-to-sequence',
IntentDetectionAndSlotFilling: 'intent-detection-and-slot-filling',
- ImageClassification : 'image-classification',
- Speech2text : 'speech-to-text',
+ ImageClassification: 'image-classification',
+ Speech2text: 'speech-to-text'
}
const url = `/projects/${this.id}/${mapping[this.projectType]}`
return url
@@ -69,49 +74,46 @@ export class ProjectReadItem {
get taskNames(): string[] {
if (this.projectType === 'IntentDetectionAndSlotFilling') {
- return [
- 'DocumentClassification',
- 'SequenceLabeling',
- ]
+ return ['DocumentClassification', 'SequenceLabeling']
}
return [this.projectType]
}
}
export class ProjectItemList {
- count: number;
- next: string | null;
- prev: string | null;
+ count: number
+ next: string | null
+ prev: string | null
@Type(() => ProjectReadItem)
@Expose({ name: 'results' })
- items: ProjectReadItem[];
+ items: ProjectReadItem[]
}
export class ProjectWriteItem {
constructor(
- public id: number,
- public name: string,
- public description: string,
- public guideline: string,
- public project_type: ProjectType,
- public random_order: boolean,
- public collaborative_annotation: boolean,
+ public id: number,
+ public name: string,
+ public description: string,
+ public guideline: string,
+ public project_type: ProjectType,
+ public random_order: boolean,
+ public collaborative_annotation: boolean,
public single_class_classification: boolean,
- public allow_overlapping: boolean,
- public grapheme_mode: boolean,
- public use_relation: boolean,
- public tags: string[],
+ public allow_overlapping: boolean,
+ public grapheme_mode: boolean,
+ public use_relation: boolean,
+ public tags: string[]
) {}
get resourceType(): string {
const mapping = {
DocumentClassification: 'TextClassificationProject',
- SequenceLabeling : 'SequenceLabelingProject',
- Seq2seq : 'Seq2seqProject',
+ SequenceLabeling: 'SequenceLabelingProject',
+ Seq2seq: 'Seq2seqProject',
IntentDetectionAndSlotFilling: 'IntentDetectionAndSlotFillingProject',
- ImageClassification : 'ImageClassificationProject',
- Speech2text : 'Speech2textProject',
+ ImageClassification: 'ImageClassificationProject',
+ Speech2text: 'Speech2textProject'
}
return mapping[this.project_type]
}
@@ -129,7 +131,7 @@ export class ProjectWriteItem {
allow_overlapping: this.allow_overlapping,
grapheme_mode: this.grapheme_mode,
use_relation: this.use_relation,
- tags: this.tags.map(tag => ({text: tag})),
+ tags: this.tags.map((tag) => ({ text: tag })),
resourcetype: this.resourceType
}
}
diff --git a/frontend/domain/models/project/projectRepository.ts b/frontend/domain/models/project/projectRepository.ts
index 223ea0175d..874579f6a8 100644
--- a/frontend/domain/models/project/projectRepository.ts
+++ b/frontend/domain/models/project/projectRepository.ts
@@ -1,6 +1,6 @@
import { ProjectReadItem, ProjectWriteItem, ProjectItemList } from '~/domain/models/project/project'
-export type SearchOption = {[key: string]: string | (string | null)[]}
+export type SearchOption = { [key: string]: string | (string | null)[] }
export interface ProjectRepository {
list({ limit, offset, q }: SearchOption): Promise<ProjectItemList>
diff --git a/frontend/domain/models/role/role.ts b/frontend/domain/models/role/role.ts
index 36895d4689..48ab4fafd5 100644
--- a/frontend/domain/models/role/role.ts
+++ b/frontend/domain/models/role/role.ts
@@ -1,6 +1,6 @@
export class RoleItem {
- id: number;
- name: string;
+ id: number
+ name: string
toObject(): Object {
return {
diff --git a/frontend/domain/models/tag/tag.ts b/frontend/domain/models/tag/tag.ts
index 4d7887e0d1..2d2a5863b3 100644
--- a/frontend/domain/models/tag/tag.ts
+++ b/frontend/domain/models/tag/tag.ts
@@ -1,7 +1,7 @@
export class TagItem {
- id: number;
- text: string;
- project: string;
+ id: number
+ text: string
+ project: string
toObject(): Object {
return {
diff --git a/frontend/domain/models/tasks/annotationRepository.ts b/frontend/domain/models/tasks/annotationRepository.ts
index 0f39657761..881d84af92 100644
--- a/frontend/domain/models/tasks/annotationRepository.ts
+++ b/frontend/domain/models/tasks/annotationRepository.ts
@@ -1,41 +1,37 @@
import ApiService from '@/services/api.service'
import { AnnotationModel } from './interface'
-
export abstract class AnnotationRepository<T extends AnnotationModel> {
- constructor(
- private readonly model: any,
- readonly request = ApiService
- ) {}
-
- public async list(projectId: string, docId: number): Promise<T[]> {
- const url = this.baseUrl(projectId, docId)
- const response = await this.request.get(url)
- const items: T[] = response.data
- return items.map(item => this.model.valueOf(item))
- }
-
- public async create(projectId: string, docId: number, item: T): Promise<void> {
- const url = this.baseUrl(projectId, docId)
- await this.request.post(url, item.toObject())
- }
-
- public async delete(projectId: string, docId: number, annotationId: number): Promise<void> {
- const url = this.baseUrl(projectId, docId) + `/${annotationId}`
- await this.request.delete(url)
- }
-
- public async clear(projectId: string, docId: number): Promise<void> {
- const url = this.baseUrl(projectId, docId)
- await this.request.delete(url)
- }
-
- public async autoLabel(projectId: string, docId: number): Promise<void> {
- const url = `/projects/${projectId}/auto-labeling?example=${docId}`
- await this.request.post(url, {})
- }
+ constructor(private readonly model: any, readonly request = ApiService) {}
+
+ public async list(projectId: string, docId: number): Promise<T[]> {
+ const url = this.baseUrl(projectId, docId)
+ const response = await this.request.get(url)
+ const items: T[] = response.data
+ return items.map((item) => this.model.valueOf(item))
+ }
+
+ public async create(projectId: string, docId: number, item: T): Promise<void> {
+ const url = this.baseUrl(projectId, docId)
+ await this.request.post(url, item.toObject())
+ }
+
+ public async delete(projectId: string, docId: number, annotationId: number): Promise<void> {
+ const url = this.baseUrl(projectId, docId) + `/${annotationId}`
+ await this.request.delete(url)
+ }
+
+ public async clear(projectId: string, docId: number): Promise<void> {
+ const url = this.baseUrl(projectId, docId)
+ await this.request.delete(url)
+ }
+
+ public async autoLabel(projectId: string, docId: number): Promise<void> {
+ const url = `/projects/${projectId}/auto-labeling?example=${docId}`
+ await this.request.post(url, {})
+ }
- protected baseUrl(projectId: string, docId: number): string {
- return `/projects/${projectId}/examples/${docId}/annotations`
- }
+ protected baseUrl(projectId: string, docId: number): string {
+ return `/projects/${projectId}/examples/${docId}/annotations`
+ }
}
diff --git a/frontend/domain/models/tasks/interface.ts b/frontend/domain/models/tasks/interface.ts
index 065bdae10a..4781f3b56d 100644
--- a/frontend/domain/models/tasks/interface.ts
+++ b/frontend/domain/models/tasks/interface.ts
@@ -1,4 +1,3 @@
-
export interface AnnotationModel {
valueOf(values: object): any
toObject(): object
diff --git a/frontend/domain/models/tasks/relation.ts b/frontend/domain/models/tasks/relation.ts
index a8282f1000..420ab06d59 100644
--- a/frontend/domain/models/tasks/relation.ts
+++ b/frontend/domain/models/tasks/relation.ts
@@ -1,24 +1,26 @@
export class RelationItem {
- constructor(
- public id: number,
- public fromId: number,
- public toId: number,
- public type: number,
- ) {
- }
+ constructor(public id: number, public fromId: number, public toId: number, public type: number) {}
- static valueOf(
- {id, from_id, to_id, type}: { id: number, from_id: number, to_id: number, type: number }
- ): RelationItem {
- return new RelationItem(id, from_id, to_id, type)
- }
+ static valueOf({
+ id,
+ from_id,
+ to_id,
+ type
+ }: {
+ id: number
+ from_id: number
+ to_id: number
+ type: number
+ }): RelationItem {
+ return new RelationItem(id, from_id, to_id, type)
+ }
- toObject(): Object {
- return {
- id: this.id,
- from_id: this.fromId,
- to_id: this.toId,
- type: this.type,
- }
+ toObject(): Object {
+ return {
+ id: this.id,
+ from_id: this.fromId,
+ to_id: this.toId,
+ type: this.type
}
+ }
}
diff --git a/frontend/domain/models/tasks/relationRepository.ts b/frontend/domain/models/tasks/relationRepository.ts
index dd7bbe456e..a454539925 100644
--- a/frontend/domain/models/tasks/relationRepository.ts
+++ b/frontend/domain/models/tasks/relationRepository.ts
@@ -1,13 +1,18 @@
import { RelationItem } from '~/domain/models/tasks/relation'
export interface RelationRepository {
- list(projectId: string, exampleId: number): Promise<RelationItem[]>
+ list(projectId: string, exampleId: number): Promise<RelationItem[]>
- create(projectId: string, exampleId: number, relation: RelationItem): Promise<RelationItem>
+ create(projectId: string, exampleId: number, relation: RelationItem): Promise<RelationItem>
- update(projectId: string, exampleId: number, relationId: number, relationType: number): Promise<RelationItem>
+ update(
+ projectId: string,
+ exampleId: number,
+ relationId: number,
+ relationType: number
+ ): Promise<RelationItem>
- delete(projectId: string, exampleId: number, relationId: number): Promise<void>
+ delete(projectId: string, exampleId: number, relationId: number): Promise<void>
- bulkDelete(projectId: string, exampleId: number, relationIds: number[]): Promise<void>
+ bulkDelete(projectId: string, exampleId: number, relationIds: number[]): Promise<void>
}
diff --git a/frontend/domain/models/tasks/seq2seq.ts b/frontend/domain/models/tasks/seq2seq.ts
index bc86ef7690..273d76d41c 100644
--- a/frontend/domain/models/tasks/seq2seq.ts
+++ b/frontend/domain/models/tasks/seq2seq.ts
@@ -1,16 +1,9 @@
import { AnnotationModel } from './interface'
-export class Seq2seqLabel implements AnnotationModel{
- constructor(
- public id: number,
- public text: string,
- public user: number,
- ) {}
+export class Seq2seqLabel implements AnnotationModel {
+ constructor(public id: number, public text: string, public user: number) {}
- static valueOf(
- { id, text, user }:
- { id: number, text: string, user: number }
- ) {
+ static valueOf({ id, text, user }: { id: number; text: string; user: number }) {
return new Seq2seqLabel(id, text, user)
}
diff --git a/frontend/domain/models/tasks/sequenceLabeling.ts b/frontend/domain/models/tasks/sequenceLabeling.ts
index 64e009d712..37a7a84f67 100644
--- a/frontend/domain/models/tasks/sequenceLabeling.ts
+++ b/frontend/domain/models/tasks/sequenceLabeling.ts
@@ -9,10 +9,19 @@ export class Span implements AnnotationModel {
public endOffset: number
) {}
- static valueOf(
- { id, label, user, start_offset, end_offset }:
- { id: number, label: number, user: number, start_offset: number, end_offset: number }
- ) {
+ static valueOf({
+ id,
+ label,
+ user,
+ start_offset,
+ end_offset
+ }: {
+ id: number
+ label: number
+ user: number
+ start_offset: number
+ end_offset: number
+ }) {
return new Span(id, label, user, start_offset, end_offset)
}
diff --git a/frontend/domain/models/tasks/textClassification.ts b/frontend/domain/models/tasks/textClassification.ts
index 0c3c73b687..721c89d206 100644
--- a/frontend/domain/models/tasks/textClassification.ts
+++ b/frontend/domain/models/tasks/textClassification.ts
@@ -1,17 +1,10 @@
import { AnnotationModel } from './interface'
-export class TextClassificationItem implements AnnotationModel{
- constructor(
- public id: number,
- public label: number,
- public user: number,
- ) {}
+export class CategoryItem implements AnnotationModel {
+ constructor(public id: number, public label: number, public user: number) {}
- static valueOf(
- { id, label, user }:
- { id: number, label: number, user: number }
- ) {
- return new TextClassificationItem(id, label, user)
+ static valueOf({ id, label, user }: { id: number; label: number; user: number }) {
+ return new CategoryItem(id, label, user)
}
toObject() {
diff --git a/frontend/domain/models/upload/catalog.ts b/frontend/domain/models/upload/catalog.ts
index f6497bfa83..e13c713227 100644
--- a/frontend/domain/models/upload/catalog.ts
+++ b/frontend/domain/models/upload/catalog.ts
@@ -1,16 +1,16 @@
import { Expose } from 'class-transformer'
export class Catalog {
- name: string;
- example: string;
- properties: object;
+ name: string
+ example: string
+ properties: object
@Expose({ name: 'task_id' })
- taskId: string;
+ taskId: string
@Expose({ name: 'display_name' })
- displayName: string;
+ displayName: string
@Expose({ name: 'accept_types' })
- acceptTypes: string;
+ acceptTypes: string
}
diff --git a/frontend/domain/models/upload/catalogRepository.ts b/frontend/domain/models/upload/catalogRepository.ts
index 42c01f60e3..c39724d542 100644
--- a/frontend/domain/models/upload/catalogRepository.ts
+++ b/frontend/domain/models/upload/catalogRepository.ts
@@ -1,6 +1,5 @@
import { Catalog } from './catalog'
-
export interface CatalogRepository {
list(projectId: string): Promise<Catalog[]>
}
diff --git a/frontend/domain/models/upload/parseRepository.ts b/frontend/domain/models/upload/parseRepository.ts
index fcd5d94a3a..d8b2590744 100644
--- a/frontend/domain/models/upload/parseRepository.ts
+++ b/frontend/domain/models/upload/parseRepository.ts
@@ -1,5 +1,11 @@
export interface ParseRepository {
- analyze(projectId: string, format: string, task: string, uploadIds: number[], option: object): Promise<string>
+ analyze(
+ projectId: string,
+ format: string,
+ task: string,
+ uploadIds: number[],
+ option: object
+ ): Promise<string>
revert(serverId: string): void
}
diff --git a/frontend/domain/models/user/user.ts b/frontend/domain/models/user/user.ts
index 62c3e99021..bc88f68c46 100644
--- a/frontend/domain/models/user/user.ts
+++ b/frontend/domain/models/user/user.ts
@@ -1,14 +1,14 @@
import { Expose } from 'class-transformer'
export class UserItem {
- id: number;
- username: string;
-
+ id: number
+ username: string
+
@Expose({ name: 'is_superuser' })
- isSuperuser: boolean;
+ isSuperuser: boolean
@Expose({ name: 'is_staff' })
- isStaff: boolean;
+ isStaff: boolean
toObject(): Object {
return {
diff --git a/frontend/domain/models/utils/stepper.ts b/frontend/domain/models/utils/stepper.ts
index 5e169f6f58..a7e5a8c15c 100644
--- a/frontend/domain/models/utils/stepper.ts
+++ b/frontend/domain/models/utils/stepper.ts
@@ -1,16 +1,11 @@
export class StepCounter {
private step: number
- constructor(
- private readonly minStep: number = 1,
- private readonly maxStep: number = 10
- ) {
+ constructor(private readonly minStep: number = 1, private readonly maxStep: number = 10) {
this.step = 1
}
- static valueOf(
- minStep: number = 1, maxStep: number = 10
- ): StepCounter {
+ static valueOf(minStep: number = 1, maxStep: number = 10): StepCounter {
return new StepCounter(minStep, maxStep)
}
@@ -53,4 +48,4 @@ export class StepCounter {
isLast(): boolean {
return this.step === this.maxStep
}
-}
\ No newline at end of file
+}
diff --git a/frontend/i18n/de/projects/dataset.js b/frontend/i18n/de/projects/dataset.js
index edec027252..272ffc1a39 100644
--- a/frontend/i18n/de/projects/dataset.js
+++ b/frontend/i18n/de/projects/dataset.js
@@ -14,6 +14,7 @@ export default {
exportDataTitle: 'Exportiere Daten',
exportDataMessage: 'Wähle ein Dateiformat',
deleteDocumentsTitle: 'Dokument löschen',
- deleteDocumentsMessage: 'Bist du dir sicher, dass du die Dokumente aus dem Projekt löschen willst?',
+ deleteDocumentsMessage:
+ 'Bist du dir sicher, dass du die Dokumente aus dem Projekt löschen willst?',
pageText: '{0}-{1} von {2}'
}
diff --git a/frontend/i18n/de/projects/errors.js b/frontend/i18n/de/projects/errors.js
index c9ead2c6ab..cf96ab8196 100644
--- a/frontend/i18n/de/projects/errors.js
+++ b/frontend/i18n/de/projects/errors.js
@@ -1,5 +1,7 @@
export default {
- fileCannotUpload: 'Die Datei(en) konnten nicht hochgeladen werden. Vielleicht ungültiges Format.\n Bitte prüfe die verfügbaren Dateiformate und folgende Datei(en): ',
- labelCannotCreate: 'Das Label konnte nicht erstellt werden.\n Jeder Labelname und jedes Tastenkürzel kann nur einmal vergeben werden.',
+ fileCannotUpload:
+ 'Die Datei(en) konnten nicht hochgeladen werden. Vielleicht ungültiges Format.\n Bitte prüfe die verfügbaren Dateiformate und folgende Datei(en): ',
+ labelCannotCreate:
+ 'Das Label konnte nicht erstellt werden.\n Jeder Labelname und jedes Tastenkürzel kann nur einmal vergeben werden.',
invalidUserOrPass: 'Falscher Benutername oder falsches Passwort, oder etwas ist schief gelaufen.'
}
diff --git a/frontend/i18n/de/projects/overview.js b/frontend/i18n/de/projects/overview.js
index e7cbd754c9..b856fbbcb8 100644
--- a/frontend/i18n/de/projects/overview.js
+++ b/frontend/i18n/de/projects/overview.js
@@ -17,6 +17,6 @@ export default {
'Sequenz zu Sequenz',
'Intent-Erkennung und Slot-Füllung',
'Bildklassifikation',
- 'Sprache zu Text',
+ 'Sprache zu Text'
]
}
diff --git a/frontend/i18n/de/projects/statistics.js b/frontend/i18n/de/projects/statistics.js
index 57053ca1fa..b8f07d0726 100644
--- a/frontend/i18n/de/projects/statistics.js
+++ b/frontend/i18n/de/projects/statistics.js
@@ -1,9 +1,6 @@
export default {
statistics: 'Statistiken',
- progress: [
- 'Abgeschlossen',
- 'Unvollständig'
- ],
+ progress: ['Abgeschlossen', 'Unvollständig'],
labelStats: 'Labelstatistiken',
userStats: 'Nutzerstatistiken'
}
diff --git a/frontend/i18n/en/projects/errors.js b/frontend/i18n/en/projects/errors.js
index fcb938469b..568932c4de 100644
--- a/frontend/i18n/en/projects/errors.js
+++ b/frontend/i18n/en/projects/errors.js
@@ -1,5 +1,7 @@
export default {
- fileCannotUpload: 'The file(s) could not be uploaded. Maybe invalid format.\n Please check available formats and the following file(s): ',
- labelCannotCreate: 'The label could not be created.\n You cannot use the same label name or shortcut key.',
+ fileCannotUpload:
+ 'The file(s) could not be uploaded. Maybe invalid format.\n Please check available formats and the following file(s): ',
+ labelCannotCreate:
+ 'The label could not be created.\n You cannot use the same label name or shortcut key.',
invalidUserOrPass: 'Incorrect username or password, or something went wrong.'
}
diff --git a/frontend/i18n/en/projects/overview.js b/frontend/i18n/en/projects/overview.js
index 6be96ed1fe..22a78dc8e9 100644
--- a/frontend/i18n/en/projects/overview.js
+++ b/frontend/i18n/en/projects/overview.js
@@ -17,6 +17,6 @@ export default {
'Sequence to sequence',
'Intent Detection and Slot Filling',
'Image Classification',
- 'Speech to Text',
+ 'Speech to Text'
]
}
diff --git a/frontend/i18n/en/projects/statistics.js b/frontend/i18n/en/projects/statistics.js
index f19665917b..da646fe48d 100644
--- a/frontend/i18n/en/projects/statistics.js
+++ b/frontend/i18n/en/projects/statistics.js
@@ -1,9 +1,6 @@
export default {
statistics: 'Metrics',
- progress: [
- 'Completed',
- 'Incomplete'
- ],
+ progress: ['Completed', 'Incomplete'],
labelStats: 'Label stats',
userStats: 'User stats'
}
diff --git a/frontend/i18n/fr/home.js b/frontend/i18n/fr/home.js
index 3f6b188428..b77bce87bf 100644
--- a/frontend/i18n/fr/home.js
+++ b/frontend/i18n/fr/home.js
@@ -6,12 +6,12 @@ export default {
featuresTitle1: 'Collaboration des équipes',
featuresText1: 'Annotation avec vos coéquipiers',
featuresTitle2: 'Toute langue',
- featuresText2: 'Annotation dans n\'importe quelle langue',
+ featuresText2: "Annotation dans n'importe quelle langue",
featuresTitle3: 'Source ouverte',
featuresText3: 'Annotation gratuite et personnalisable',
footerTitle: 'Réalisez vos idées rapidement',
demoDropDown: 'Essayer la démo',
- demoNER: 'Reconnaissance de l\'entité désignée',
+ demoNER: "Reconnaissance de l'entité désignée",
demoSent: 'Analyse du sentiment',
demoTranslation: 'Traduction',
demoTextToSQL: 'Texte à SQL'
diff --git a/frontend/i18n/fr/projects/dataset.js b/frontend/i18n/fr/projects/dataset.js
index a28c22ff46..e88fa49d44 100644
--- a/frontend/i18n/fr/projects/dataset.js
+++ b/frontend/i18n/fr/projects/dataset.js
@@ -1,8 +1,8 @@
export default {
dataset: 'Ensemble de données',
actions: 'Actions',
- importDataset: 'Importer l\'ensemble de données',
- exportDataset: 'Exporter l\'ensemble de données',
+ importDataset: "Importer l'ensemble de données",
+ exportDataset: "Exporter l'ensemble de données",
text: 'Texte',
metadata: 'Métadonnées',
action: 'Action',
diff --git a/frontend/i18n/fr/projects/errors.js b/frontend/i18n/fr/projects/errors.js
index ff83e635a0..4b2308bea6 100644
--- a/frontend/i18n/fr/projects/errors.js
+++ b/frontend/i18n/fr/projects/errors.js
@@ -1,5 +1,7 @@
export default {
- fileCannotUpload: 'Le fichier n\'a pas pu être téléchargé. Peut-être un format non valide.\n Veuillez vérifier attentivement les formats disponibles.',
- labelCannotCreate: 'L\'étiquette n\'a pas pu être créé.\n Vous ne pouvez pas utiliser le même nom d\'étiquette ou la même raccourci clavier.',
- invalidUserOrPass: 'Nom d\'utilisateur ou mot de passe incorrect, ou quelque chose a mal tourné.'
+ fileCannotUpload:
+ "Le fichier n'a pas pu être téléchargé. Peut-être un format non valide.\n Veuillez vérifier attentivement les formats disponibles.",
+ labelCannotCreate:
+    "L'étiquette n'a pas pu être créée.\n Vous ne pouvez pas utiliser le même nom d'étiquette ou le même raccourci clavier.",
+ invalidUserOrPass: "Nom d'utilisateur ou mot de passe incorrect, ou quelque chose a mal tourné."
}
diff --git a/frontend/i18n/fr/projects/guideline.js b/frontend/i18n/fr/projects/guideline.js
index 75faf5a448..28da0e326a 100644
--- a/frontend/i18n/fr/projects/guideline.js
+++ b/frontend/i18n/fr/projects/guideline.js
@@ -1,4 +1,4 @@
export default {
guideline: 'Ligne directrice',
- writeGuidelinePrompt: 'Veuillez rédiger le guide d\'annotation.'
+ writeGuidelinePrompt: "Veuillez rédiger le guide d'annotation."
}
diff --git a/frontend/i18n/fr/projects/home.js b/frontend/i18n/fr/projects/home.js
index ed010b4294..7584ce7fe9 100644
--- a/frontend/i18n/fr/projects/home.js
+++ b/frontend/i18n/fr/projects/home.js
@@ -7,5 +7,5 @@ export default {
defineGuideline: 'Définir une ligne directrice pour le travail',
annotateDataset: 'Annoter un ensemble de données',
viewStatistics: 'Voir les statistiques',
- exportDataset: 'Exporter l\'ensemble de données'
+ exportDataset: "Exporter l'ensemble de données"
}
diff --git a/frontend/i18n/fr/projects/labels.js b/frontend/i18n/fr/projects/labels.js
index b98d5ba39b..ce3885fa76 100644
--- a/frontend/i18n/fr/projects/labels.js
+++ b/frontend/i18n/fr/projects/labels.js
@@ -5,15 +5,15 @@ export default {
createLabel: 'Créer un label',
importLabels: 'Importer des étiquettes',
exportLabels: 'Exporter des étiquettes',
- labelName: 'Nom de l\'étiquette',
- labelMessage: 'Le nom de l\'étiquette est obligatoire',
+ labelName: "Nom de l'étiquette",
+ labelMessage: "Le nom de l'étiquette est obligatoire",
createLink: 'Create Link',
linkName: 'Link name',
linkMessage: 'Link name is required',
key: 'Clé',
- deleteLabel: 'Supprimer l\'étiquette',
+ deleteLabel: "Supprimer l'étiquette",
deleteMessage: 'Êtes-vous sûr de vouloir supprimer ces étiquettes de ce projet ?',
- importTitle: 'Télécharger l\'étiquette',
+ importTitle: "Télécharger l'étiquette",
importMessage1: 'Exemple de format',
importMessage2: 'Sélectionnez un fichier',
filePlaceholder: 'Saisir un fichier'
diff --git a/frontend/i18n/fr/projects/members.js b/frontend/i18n/fr/projects/members.js
index e82845dff9..5c74de1d53 100644
--- a/frontend/i18n/fr/projects/members.js
+++ b/frontend/i18n/fr/projects/members.js
@@ -3,14 +3,14 @@ export default {
role: 'Rôle',
updateRole: 'Mettre à jour le rôle',
addMember: 'Ajouter un membre',
- userSearchAPIs: 'Rechercher des utilisateurs (avec l\'IPA)',
+ userSearchAPIs: "Rechercher des utilisateurs (avec l'IPA)",
userSearchPrompt: 'Commencez à taper pour rechercher',
removeMember: 'Supprimer un membre',
removePrompt: 'Êtes-vous sûr de vouloir supprimer ces membres ?',
roles: {
- projectAdmin: 'L\'administrateur du projet',
+ projectAdmin: "L'administrateur du projet",
annotator: 'Étiqueteuse',
- annotationApprover: 'Approbateur d\'étiquettes',
+ annotationApprover: "Approbateur d'étiquettes",
undefined: 'Aucun rôle défini'
}
}
diff --git a/frontend/i18n/fr/projects/overview.js b/frontend/i18n/fr/projects/overview.js
index a3179ad8ef..23b8555053 100644
--- a/frontend/i18n/fr/projects/overview.js
+++ b/frontend/i18n/fr/projects/overview.js
@@ -5,7 +5,7 @@ export default {
textClassification: 'Classification des textes',
sequenceLabeling: 'Étiquetage des séquences',
sequenceToSequence: 'Séquence à séquence',
- randomizeDocOrder: 'Randomiser l\'ordre des documents',
+ randomizeDocOrder: "Randomiser l'ordre des documents",
shareAnnotations: 'Partager les annotations entre tous les utilisateurs',
deleteProjectTitle: 'Supprimer le projet',
deleteProjectMessage: 'Êtes-vous sûr de vouloir supprimer ces projets ?',
@@ -13,8 +13,8 @@ export default {
'Classification des textes',
'Étiquetage des séquences',
'Séquence à séquence',
- 'Détection d\'intention et remplissage d\'emplacements',
- 'Classification d\'images',
- 'De la parole au texte',
+ "Détection d'intention et remplissage d'emplacements",
+ "Classification d'images",
+ 'De la parole au texte'
]
}
diff --git a/frontend/i18n/fr/projects/statistics.js b/frontend/i18n/fr/projects/statistics.js
index 83f0e99bac..c685efc0c0 100644
--- a/frontend/i18n/fr/projects/statistics.js
+++ b/frontend/i18n/fr/projects/statistics.js
@@ -1,9 +1,6 @@
export default {
statistics: 'Statistiques',
- progress: [
- 'Complété',
- 'Incomplet'
- ],
+ progress: ['Complété', 'Incomplet'],
labelStats: 'Étiqueter les stats',
userStats: 'Stats des utilisateurs'
}
diff --git a/frontend/i18n/fr/rules.js b/frontend/i18n/fr/rules.js
index 861b135bfc..23a23004aa 100644
--- a/frontend/i18n/fr/rules.js
+++ b/frontend/i18n/fr/rules.js
@@ -3,12 +3,12 @@ export default {
colorRequired: 'La couleur est obligatoire'
},
labelNameRules: {
- labelRequired: 'Le nom de l\'étiquette est obligatoire',
- labelLessThan100Chars: 'Le nom de l\'étiquette doit comporter moins de 100 caractères'
+ labelRequired: "Le nom de l'étiquette est obligatoire",
+ labelLessThan100Chars: "Le nom de l'étiquette doit comporter moins de 100 caractères"
},
userNameRules: {
- userNameRequired: 'Le nom d\'utilisateur est requis',
- userNameLessThan30Chars: 'Le nom d\'utilisateur doit comporter moins de 30 caractères'
+ userNameRequired: "Le nom d'utilisateur est requis",
+ userNameLessThan30Chars: "Le nom d'utilisateur doit comporter moins de 30 caractères"
},
roleRules: {
roleRequired: 'Rôle est obligatoire'
diff --git a/frontend/i18n/fr/user.js b/frontend/i18n/fr/user.js
index 47c729a76c..06f4474784 100644
--- a/frontend/i18n/fr/user.js
+++ b/frontend/i18n/fr/user.js
@@ -1,6 +1,6 @@
export default {
login: 'Connexion',
signOut: 'Déconnexion',
- username: 'Nom d\'utilisateur',
+ username: "Nom d'utilisateur",
password: 'Mot de passe'
}
diff --git a/frontend/i18n/zh/projects/overview.js b/frontend/i18n/zh/projects/overview.js
index f8c5878d59..9ae79f040a 100644
--- a/frontend/i18n/zh/projects/overview.js
+++ b/frontend/i18n/zh/projects/overview.js
@@ -11,12 +11,5 @@ export default {
deleteProjectTitle: '删除项目',
deleteProjectMessage: '你确定要删除这些项目吗?',
search: '搜索',
- projectTypes: [
- '文本分类',
- '序列标注',
- '序列到序列',
- '意图检测和槽填充',
- '图像分类',
- '文字转语音'
- ]
+ projectTypes: ['文本分类', '序列标注', '序列到序列', '意图检测和槽填充', '图像分类', '文字转语音']
}
diff --git a/frontend/i18n/zh/projects/statistics.js b/frontend/i18n/zh/projects/statistics.js
index 602c70ce1f..180cf7cb24 100644
--- a/frontend/i18n/zh/projects/statistics.js
+++ b/frontend/i18n/zh/projects/statistics.js
@@ -1,9 +1,6 @@
export default {
statistics: '统计',
- progress: [
- '已完成',
- '未完成'
- ],
+ progress: ['已完成', '未完成'],
labelStats: '标签统计',
userStats: '用户统计'
}
diff --git a/frontend/jest.config.js b/frontend/jest.config.js
index 9c167fabb5..c39293f3d4 100644
--- a/frontend/jest.config.js
+++ b/frontend/jest.config.js
@@ -3,7 +3,7 @@ module.exports = {
'^@/(.*)$': '<rootDir>/$1',
'^~/(.*)$': '<rootDir>/$1',
'^vue$': 'vue/dist/vue.common.js',
- "^.+\\.(css|styl|less|sass|scss|png|jpg|ttf|woff|woff2)$": "jest-transform-stub"
+ '^.+\\.(css|styl|less|sass|scss|png|jpg|ttf|woff|woff2)$': 'jest-transform-stub'
},
moduleFileExtensions: ['js', 'vue', 'json'],
transform: {
@@ -12,8 +12,5 @@ module.exports = {
'^.+\\.(css|styl|less|sass|scss|png|jpg|ttf|woff|woff2)$': 'jest-transform-stub'
},
collectCoverage: true,
- collectCoverageFrom: [
- '<rootDir>/components/**/*.vue',
- '<rootDir>/pages/**/*.vue'
- ]
+ collectCoverageFrom: ['<rootDir>/components/**/*.vue', '<rootDir>/pages/**/*.vue']
}
diff --git a/frontend/layouts/default.vue b/frontend/layouts/default.vue
index a700aa297d..f55dd13c6e 100644
--- a/frontend/layouts/default.vue
+++ b/frontend/layouts/default.vue
@@ -14,6 +14,6 @@ export default {
components: {
TheFooter,
TheHeader
- },
+ }
}
</script>
diff --git a/frontend/layouts/demo.vue b/frontend/layouts/demo.vue
index 2bf94d4497..3ed281accc 100644
--- a/frontend/layouts/demo.vue
+++ b/frontend/layouts/demo.vue
@@ -11,6 +11,6 @@ import TheHeader from '~/components/layout/TheHeader'
export default {
components: {
TheHeader
- },
+ }
}
</script>
diff --git a/frontend/layouts/error.vue b/frontend/layouts/error.vue
index dda5a2bf2c..c8c8b18cc3 100644
--- a/frontend/layouts/error.vue
+++ b/frontend/layouts/error.vue
@@ -1,22 +1,11 @@
<template>
<v-app dark>
- <v-container
- fill-height
- style="height: calc(100vh - 58px);"
- >
+ <v-container fill-height style="height: calc(100vh - 58px)">
<v-layout align-center>
<v-flex text-center>
- <h1 class="display-2 primary--text">
- Whoops, 404
- </h1>
+ <h1 class="display-2 primary--text">Whoops, 404</h1>
<p>The page you were looking for does not exist</p>
- <v-btn
- to="/"
- outlined
- color="primary"
- >
- Get me out of here!
- </v-btn>
+ <v-btn to="/" outlined color="primary"> Get me out of here! </v-btn>
</v-flex>
</v-layout>
</v-container>
@@ -35,12 +24,11 @@ export default {
data() {
return {
pageNotFound: '404 Not Found',
- otherError: 'The page you were looking for wasn\'t allowed to access.'
+ otherError: "The page you were looking for wasn't allowed to access."
}
},
head() {
- const title =
- this.error.statusCode === 404 ? this.pageNotFound : this.otherError
+ const title = this.error.statusCode === 404 ? this.pageNotFound : this.otherError
return {
title
}
diff --git a/frontend/layouts/project.vue b/frontend/layouts/project.vue
index 1b8b497912..d4b66e23bb 100644
--- a/frontend/layouts/project.vue
+++ b/frontend/layouts/project.vue
@@ -6,27 +6,13 @@
</template>
</the-header>
- <v-navigation-drawer
- v-model="drawerLeft"
- app
- clipped
- color=""
- >
- <the-side-bar
- :link="getLink"
- :is-project-admin="isProjectAdmin"
- :project="currentProject"
- />
+ <v-navigation-drawer v-model="drawerLeft" app clipped color="">
+ <the-side-bar :link="getLink" :is-project-admin="isProjectAdmin" :project="currentProject" />
</v-navigation-drawer>
<v-main>
- <v-container
- fluid
- fill-height
- >
- <v-layout
- justify-center
- >
+ <v-container fluid fill-height>
+ <v-layout justify-center>
<v-flex fill-height>
<nuxt />
</v-flex>
@@ -42,7 +28,6 @@ import TheSideBar from '~/components/layout/TheSideBar'
import TheHeader from '~/components/layout/TheHeader'
export default {
-
components: {
TheSideBar,
TheHeader
@@ -52,12 +37,12 @@ export default {
data() {
return {
drawerLeft: null,
- isProjectAdmin: false,
+ isProjectAdmin: false
}
},
computed: {
- ...mapGetters('projects', ['getLink', 'currentProject']),
+ ...mapGetters('projects', ['getLink', 'currentProject'])
},
async created() {
diff --git a/frontend/layouts/projects.vue b/frontend/layouts/projects.vue
index 2987d50cb8..56056101f3 100644
--- a/frontend/layouts/projects.vue
+++ b/frontend/layouts/projects.vue
@@ -2,13 +2,8 @@
<v-app>
<the-header />
<v-main>
- <v-container
- fluid
- fill-height
- >
- <v-layout
- justify-center
- >
+ <v-container fluid fill-height>
+ <v-layout justify-center>
<v-flex>
<nuxt />
</v-flex>
diff --git a/frontend/layouts/workspace.vue b/frontend/layouts/workspace.vue
index 6777dd6e2a..5bab94662b 100644
--- a/frontend/layouts/workspace.vue
+++ b/frontend/layouts/workspace.vue
@@ -6,16 +6,8 @@
</template>
</the-header>
- <v-navigation-drawer
- v-model="drawerLeft"
- app
- clipped
- >
- <the-side-bar
- :link="getLink"
- :is-project-admin="isProjectAdmin"
- :project="currentProject"
- />
+ <v-navigation-drawer v-model="drawerLeft" app clipped>
+ <the-side-bar :link="getLink" :is-project-admin="isProjectAdmin" :project="currentProject" />
</v-navigation-drawer>
<v-main class="pb-0">
@@ -30,7 +22,6 @@ import TheHeader from '~/components/layout/TheHeader'
import TheSideBar from '~/components/layout/TheSideBar'
export default {
-
components: {
TheSideBar,
TheHeader
@@ -45,9 +36,9 @@ export default {
},
computed: {
- ...mapGetters('projects', ['getLink', 'currentProject']),
+ ...mapGetters('projects', ['getLink', 'currentProject'])
},
-
+
watch: {
'$route.query'() {
this.$services.option.save(this.$route.params.id, this.$route.query)
diff --git a/frontend/middleware/auth.js b/frontend/middleware/auth.js
index 9f4ede2c07..8957e44964 100644
--- a/frontend/middleware/auth.js
+++ b/frontend/middleware/auth.js
@@ -1,4 +1,4 @@
-export default function({ store, redirect }) {
+export default function ({ store, redirect }) {
if (!store.getters['auth/isAuthenticated']) {
redirect('/auth')
}
diff --git a/frontend/middleware/check-admin.js b/frontend/middleware/check-admin.js
index f6028c0ee4..29b647e528 100644
--- a/frontend/middleware/check-admin.js
+++ b/frontend/middleware/check-admin.js
@@ -1,9 +1,9 @@
import _ from 'lodash'
-export default _.debounce(async function({ app, store, route, redirect }) {
+export default _.debounce(async function ({ app, store, route, redirect }) {
try {
await store.dispatch('projects/setCurrentProject', route.params.id)
- } catch(e) {
+ } catch (e) {
redirect('/projects')
}
const isProjectAdmin = await app.$services.member.isProjectAdmin(route.params.id)
diff --git a/frontend/middleware/check-auth.js b/frontend/middleware/check-auth.js
index 358f7abd74..1d205554d5 100644
--- a/frontend/middleware/check-auth.js
+++ b/frontend/middleware/check-auth.js
@@ -1,4 +1,4 @@
-export default async function({ store }) {
+export default async function ({ store }) {
if (!store.getters['auth/isAuthenticated'] || !store.getters['auth/getUsername']) {
await store.dispatch('auth/initAuth')
}
diff --git a/frontend/middleware/set-project.js b/frontend/middleware/set-project.js
index 5f46022946..935eaf1bf6 100644
--- a/frontend/middleware/set-project.js
+++ b/frontend/middleware/set-project.js
@@ -1,4 +1,4 @@
-export default async function({ store, route }) {
+export default async function ({ store, route }) {
const project = store.getters['projects/currentProject']
const isEmpty = Object.keys(project).length === 0 && project.constructor === Object
if (isEmpty) {
diff --git a/frontend/nuxt.config.js b/frontend/nuxt.config.js
index 53c8832c9f..b398810b85 100644
--- a/frontend/nuxt.config.js
+++ b/frontend/nuxt.config.js
@@ -4,19 +4,21 @@ import i18n from './i18n'
export default {
ssr: false,
/*
- ** Headers of the page
- */
+ ** Headers of the page
+ */
head: {
titleTemplate: '%s - ' + process.env.npm_package_name,
title: process.env.npm_package_name || '',
meta: [
{ charset: 'utf-8' },
{ name: 'viewport', content: 'width=device-width, initial-scale=1' },
- { hid: 'description', name: 'description', content: process.env.npm_package_description || '' }
+ {
+ hid: 'description',
+ name: 'description',
+ content: process.env.npm_package_description || ''
+ }
],
- link: [
- { rel: 'icon', type: 'image/x-icon', href: '/favicon.ico' },
- ]
+ link: [{ rel: 'icon', type: 'image/x-icon', href: '/favicon.ico' }]
},
server: {
@@ -28,17 +30,16 @@ export default {
},
/*
- ** Customize the progress-bar color
- */
+ ** Customize the progress-bar color
+ */
loading: { color: '#fff' },
/*
- ** Global CSS
- */
- css: [
- ],
+ ** Global CSS
+ */
+ css: [],
/*
- ** Plugins to load before mounting the App
- */
+ ** Plugins to load before mounting the App
+ */
plugins: [
'~/plugins/filters.js',
'~/plugins/vue-youtube.js',
@@ -48,8 +49,8 @@ export default {
'~/plugins/role.ts'
],
/*
- ** Nuxt.js modules
- */
+ ** Nuxt.js modules
+ */
modules: [
['nuxt-i18n', i18n],
// Doc: https://axios.nuxtjs.org/usage
@@ -60,9 +61,12 @@ export default {
buildModules: [
'@nuxt/typescript-build',
'@nuxtjs/composition-api/module',
- ['@nuxtjs/google-analytics', {
- id: process.env.GOOGLE_TRACKING_ID
- }],
+ [
+ '@nuxtjs/google-analytics',
+ {
+ id: process.env.GOOGLE_TRACKING_ID
+ }
+ ],
[
'@nuxtjs/vuetify',
{
@@ -70,9 +74,9 @@ export default {
treeShake: true,
defaultAssets: {
font: false,
- icons: ['mdiSvg'],
- },
- },
+ icons: ['mdiSvg']
+ }
+ }
],
[
'@nuxtjs/google-fonts',
@@ -83,14 +87,14 @@ export default {
display: 'swap',
download: true,
overwriting: true,
- inject: true,
+ inject: true
}
]
],
/*
- ** Axios module configuration
- ** See https://axios.nuxtjs.org/options
- */
+ ** Axios module configuration
+ ** See https://axios.nuxtjs.org/options
+ */
axios: {
proxy: true
},
@@ -102,9 +106,9 @@ export default {
}
},
/*
- ** vuetify module configuration
- ** https://github.com/nuxt-community/vuetify-module
- */
+ ** vuetify module configuration
+ ** https://github.com/nuxt-community/vuetify-module
+ */
vuetify: {
theme: {
primary: colors.blue.darken2,
@@ -137,12 +141,12 @@ export default {
}
},
/*
- ** Build configuration
- */
+ ** Build configuration
+ */
build: {
/*
- ** You can extend webpack config here
- */
+ ** You can extend webpack config here
+ */
publicPath: process.env.PUBLIC_PATH || '/_nuxt/',
extend(config, _) {
// config.module.rules.push({
@@ -165,6 +169,6 @@ export default {
name: '[path][name].[ext]'
}
})
- },
+ }
}
}
diff --git a/frontend/package.json b/frontend/package.json
index 29de78e8cb..8d5e17f9c3 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -8,6 +8,8 @@
"lint": "eslint --ext .ts,.js,.vue --ignore-path .gitignore .",
"lintfix": "eslint --fix --ext .ts,.js,.vue --ignore-path .gitignore .",
"precommit": "yarn lint",
+ "lint:prettier": "prettier --check .",
+ "fix:prettier": "prettier --write .",
"test": "jest",
"dev": "nuxt",
"build": "nuxt build",
diff --git a/frontend/pages/auth.vue b/frontend/pages/auth.vue
index ed928f5a0a..a223e9d040 100644
--- a/frontend/pages/auth.vue
+++ b/frontend/pages/auth.vue
@@ -4,9 +4,7 @@
<v-container class="fill-height" fluid>
<v-row align="center" justify="center">
<v-col cols="12" sm="8" md="4">
- <form-login
- :login="authenticateUser"
- />
+ <form-login :login="authenticateUser" />
</v-col>
</v-row>
</v-container>
diff --git a/frontend/pages/demo/image-classification/index.vue b/frontend/pages/demo/image-classification/index.vue
index 9271a777d7..8bfc768b82 100644
--- a/frontend/pages/demo/image-classification/index.vue
+++ b/frontend/pages/demo/image-classification/index.vue
@@ -14,12 +14,7 @@
/>
</v-card-title>
<v-divider />
- <v-img
- contain
- :src="currentDoc.filename"
- max-height="300"
- class="grey lighten-2"
- />
+ <v-img contain :src="currentDoc.filename" max-height="300" class="grey lighten-2" />
</v-card>
</v-col>
<v-col cols="12" md="3">
@@ -35,7 +30,6 @@ import ListMetadata from '@/components/tasks/metadata/ListMetadata'
import LabelGroup from '@/components/tasks/textClassification/LabelGroup'
export default {
-
components: {
LabelGroup,
ListMetadata
@@ -78,14 +72,16 @@ export default {
meta: {
url: 'https://github.com/Hironsan'
},
- annotation_approver: null,
+ annotation_approver: null
}
}
},
methods: {
removeLabel(annotationId) {
- this.currentDoc.annotations = this.currentDoc.annotations.filter(item => item.id !== annotationId)
+ this.currentDoc.annotations = this.currentDoc.annotations.filter(
+ (item) => item.id !== annotationId
+ )
},
addLabel(labelId) {
const payload = {
diff --git a/frontend/pages/demo/intent-detection-and-slot-filling/index.vue b/frontend/pages/demo/intent-detection-and-slot-filling/index.vue
index e32906bb53..f5bbc02efb 100644
--- a/frontend/pages/demo/intent-detection-and-slot-filling/index.vue
+++ b/frontend/pages/demo/intent-detection-and-slot-filling/index.vue
@@ -1,4 +1,3 @@
-
<template>
<v-main>
<v-container fluid>
@@ -58,7 +57,7 @@ export default {
prefixKey: null,
suffixKey: 'l',
color: '#fbb028',
- textColor: '#ffffff',
+ textColor: '#ffffff'
},
{
id: 5,
@@ -66,7 +65,7 @@ export default {
prefixKey: null,
suffixKey: 'm',
color: '#7c20e0',
- textColor: '#000000',
+ textColor: '#000000'
},
{
id: 6,
@@ -74,7 +73,7 @@ export default {
prefixKey: null,
suffixKey: 'o',
color: '#e6d176',
- textColor: '#000000',
+ textColor: '#000000'
},
{
id: 7,
@@ -82,7 +81,7 @@ export default {
prefixKey: null,
suffixKey: 'p',
color: '#6a74b9',
- textColor: '#ffffff',
+ textColor: '#ffffff'
}
],
items: [
@@ -123,7 +122,7 @@ export default {
exclusive: true,
currentDoc: {
id: 8,
- text: "I want to fly from Boston at 8:38 am and arrive in Denver at 11:10 in the morning.",
+ text: 'I want to fly from Boston at 8:38 am and arrive in Denver at 11:10 in the morning.',
annotations: [
{
id: 17,
@@ -131,7 +130,7 @@ export default {
label: 4,
startOffset: 19,
endOffset: 25,
- user: 1,
+ user: 1
},
{
id: 19,
@@ -139,7 +138,7 @@ export default {
label: 7,
startOffset: 29,
endOffset: 36,
- user: 1,
+ user: 1
},
{
id: 16,
@@ -147,7 +146,7 @@ export default {
label: 4,
startOffset: 51,
endOffset: 57,
- user: 1,
+ user: 1
},
{
id: 18,
@@ -155,8 +154,8 @@ export default {
label: 7,
startOffset: 61,
endOffset: 66,
- user: 1,
- },
+ user: 1
+ }
],
meta: { wikiPageId: 2 },
annotation_approver: null
@@ -166,11 +165,13 @@ export default {
methods: {
deleteEntity(annotationId) {
- this.currentDoc.annotations = this.currentDoc.annotations.filter(item => item.id !== annotationId)
+ this.currentDoc.annotations = this.currentDoc.annotations.filter(
+ (item) => item.id !== annotationId
+ )
},
updateEntity(annotationId, labelId) {
- const index = this.currentDoc.annotations.findIndex(item => item.id === annotationId)
+ const index = this.currentDoc.annotations.findIndex((item) => item.id === annotationId)
this.currentDoc.annotations[index].label = labelId
},
@@ -185,7 +186,7 @@ export default {
},
removeLabel(annotationId) {
- this.categoryAnnotations = this.categoryAnnotations.filter(item => item.id !== annotationId)
+ this.categoryAnnotations = this.categoryAnnotations.filter((item) => item.id !== annotationId)
},
addLabel(labelId) {
@@ -203,7 +204,7 @@ export default {
font-size: 1.25rem !important;
font-weight: 500;
line-height: 2rem;
- font-family: "Roboto", sans-serif !important;
+ font-family: 'Roboto', sans-serif !important;
opacity: 0.6;
}
</style>
diff --git a/frontend/pages/demo/named-entity-recognition/index.vue b/frontend/pages/demo/named-entity-recognition/index.vue
index 60c0477a97..936f947c55 100644
--- a/frontend/pages/demo/named-entity-recognition/index.vue
+++ b/frontend/pages/demo/named-entity-recognition/index.vue
@@ -1,4 +1,3 @@
-
<template>
<v-main>
<v-container fluid>
@@ -7,12 +6,13 @@
text
outlined
class="text-capitalize mr-2"
- @click="allowOverlapping=!allowOverlapping"
+ @click="allowOverlapping = !allowOverlapping"
>
Overlapping({{ allowOverlapping }})
</v-btn>
- <v-btn text outlined @click="rtl=!rtl">
- RTL(<span class="text-capitalize">{{ rtl }}</span>)
+ <v-btn text outlined @click="rtl = !rtl">
+ RTL(<span class="text-capitalize">{{ rtl }}</span
+ >)
</v-btn>
</div>
<v-row justify="center">
@@ -49,7 +49,7 @@ import ListMetadata from '@/components/tasks/metadata/ListMetadata'
export default {
components: {
EntityEditor,
- ListMetadata,
+ ListMetadata
},
layout: 'demo',
data() {
@@ -63,7 +63,7 @@ export default {
prefixKey: null,
suffixKey: 'l',
color: '#7c20e0',
- textColor: '#ffffff',
+ textColor: '#ffffff'
},
{
id: 5,
@@ -71,7 +71,7 @@ export default {
prefixKey: null,
suffixKey: 'm',
color: '#fbb028',
- textColor: '#000000',
+ textColor: '#000000'
},
{
id: 6,
@@ -79,7 +79,7 @@ export default {
prefixKey: null,
suffixKey: 'o',
color: '#e6d176',
- textColor: '#000000',
+ textColor: '#000000'
},
{
id: 7,
@@ -87,7 +87,7 @@ export default {
prefixKey: null,
suffixKey: 'p',
color: '#6a74b9',
- textColor: '#ffffff',
+ textColor: '#ffffff'
}
],
relations: [
@@ -95,15 +95,15 @@ export default {
id: 0,
fromId: 16,
toId: 17,
- labelId: 0,
- },
+ labelId: 0
+ }
],
relationLabels: [
{
id: 0,
- text: "isLorem",
- color: "#ffffff",
- },
+ text: 'isLorem',
+ color: '#ffffff'
+ }
],
currentDoc: {
id: 8,
@@ -115,7 +115,7 @@ export default {
label: 4,
startOffset: 60,
endOffset: 70,
- user: 1,
+ user: 1
},
{
id: 19,
@@ -123,7 +123,7 @@ export default {
label: 4,
startOffset: 164,
endOffset: 171,
- user: 1,
+ user: 1
},
{
id: 16,
@@ -131,7 +131,7 @@ export default {
label: 6,
startOffset: 14,
endOffset: 22,
- user: 1,
+ user: 1
},
{
id: 18,
@@ -139,7 +139,7 @@ export default {
label: 6,
startOffset: 72,
endOffset: 86,
- user: 1,
+ user: 1
},
{
id: 20,
@@ -147,8 +147,8 @@ export default {
label: 7,
startOffset: 180,
endOffset: 192,
- user: 1,
- },
+ user: 1
+ }
],
meta: { wikiPageId: 2 },
annotation_approver: null
@@ -163,13 +163,16 @@ export default {
if (this.rtl) {
this.currentDoc.text = 'داستان SVG Tiny 1.2 طولا ني است.'
} else {
- this.currentDoc.text = 'After bowling Somerset out for 83 on the opening morning at Grace Road, Leicestershire extended their first innings by 94 runs before being bowled out for 296 with England discard Andy Caddick taking three for 83.'
+ this.currentDoc.text =
+ 'After bowling Somerset out for 83 on the opening morning at Grace Road, Leicestershire extended their first innings by 94 runs before being bowled out for 296 with England discard Andy Caddick taking three for 83.'
}
}
},
methods: {
deleteEntity(annotationId) {
- this.currentDoc.annotations = this.currentDoc.annotations.filter(item => item.id !== annotationId)
+ this.currentDoc.annotations = this.currentDoc.annotations.filter(
+ (item) => item.id !== annotationId
+ )
this.relations.forEach((r) => {
if (r.fromId === annotationId || r.toId === annotationId) {
this.deleteRelation(r.id)
@@ -177,7 +180,7 @@ export default {
})
},
updateEntity(annotationId, labelId) {
- const index = this.currentDoc.annotations.findIndex(item => item.id === annotationId)
+ const index = this.currentDoc.annotations.findIndex((item) => item.id === annotationId)
this.currentDoc.annotations[index].label = labelId
},
addEntity(startOffset, endOffset, labelId) {
@@ -190,7 +193,7 @@ export default {
this.currentDoc.annotations.push(payload)
},
deleteRelation(relationId) {
- this.relations = this.relations.filter(item => item.id !== relationId)
+ this.relations = this.relations.filter((item) => item.id !== relationId)
}
}
}
@@ -200,7 +203,7 @@ export default {
font-size: 1.25rem !important;
font-weight: 500;
line-height: 2rem;
- font-family: "Roboto", sans-serif !important;
+ font-family: 'Roboto', sans-serif !important;
opacity: 0.6;
}
</style>
diff --git a/frontend/pages/demo/sentiment-analysis/index.vue b/frontend/pages/demo/sentiment-analysis/index.vue
index 75b6ff5bd5..dbf5a6fda4 100644
--- a/frontend/pages/demo/sentiment-analysis/index.vue
+++ b/frontend/pages/demo/sentiment-analysis/index.vue
@@ -32,7 +32,6 @@ import ListMetadata from '@/components/tasks/metadata/ListMetadata'
import LabelGroup from '@/components/tasks/textClassification/LabelGroup'
export default {
-
components: {
LabelGroup,
ListMetadata
@@ -72,7 +71,7 @@ export default {
document: 8
}
],
- meta: { wikiPageId: 2},
+ meta: { wikiPageId: 2 },
annotation_approver: null
}
}
@@ -80,7 +79,9 @@ export default {
methods: {
removeLabel(annotationId) {
- this.currentDoc.annotations = this.currentDoc.annotations.filter(item => item.id !== annotationId)
+ this.currentDoc.annotations = this.currentDoc.annotations.filter(
+ (item) => item.id !== annotationId
+ )
},
addLabel(labelId) {
const payload = {
diff --git a/frontend/pages/demo/speech-to-text/index.vue b/frontend/pages/demo/speech-to-text/index.vue
index b278658dc3..8e68e64453 100644
--- a/frontend/pages/demo/speech-to-text/index.vue
+++ b/frontend/pages/demo/speech-to-text/index.vue
@@ -3,12 +3,7 @@
<v-container fluid>
<v-row justify="center">
<v-col cols="12" md="9">
- <audio
- controls
- :src="src"
- class="mt-2 mb-5"
- style="width:100%;"
- >
+ <audio controls :src="src" class="mt-2 mb-5" style="width: 100%">
Your browser does not support the
<code>audio</code> element.
</audio>
@@ -33,7 +28,6 @@ import ListMetadata from '@/components/tasks/metadata/ListMetadata'
import Seq2seqBox from '~/components/tasks/seq2seq/Seq2seqBox'
export default {
-
components: {
Seq2seqBox,
ListMetadata
@@ -48,7 +42,7 @@ export default {
annotations: [
{
id: 17,
- text: "Hi! Welcome to doccano!",
+ text: 'Hi! Welcome to doccano!',
user: 1,
document: 8
}
@@ -64,10 +58,12 @@ export default {
methods: {
_deleteAnnotation(annotationId) {
- this.currentDoc.annotations = this.currentDoc.annotations.filter(item => item.id !== annotationId)
+ this.currentDoc.annotations = this.currentDoc.annotations.filter(
+ (item) => item.id !== annotationId
+ )
},
_updateAnnotation(annotationId, text) {
- const index = this.currentDoc.annotations.findIndex(item => item.id === annotationId)
+ const index = this.currentDoc.annotations.findIndex((item) => item.id === annotationId)
this.currentDoc.annotations[index].text = text
},
_createAnnotation(text) {
diff --git a/frontend/pages/demo/text-to-sql/index.vue b/frontend/pages/demo/text-to-sql/index.vue
index a06441762e..0ee10ef7d2 100644
--- a/frontend/pages/demo/text-to-sql/index.vue
+++ b/frontend/pages/demo/text-to-sql/index.vue
@@ -3,9 +3,7 @@
<v-container fluid>
<v-row justify="center">
<v-col cols="12" md="9">
- <v-card
- class="title mb-5"
- >
+ <v-card class="title mb-5">
<v-card-text class="title">
{{ currentDoc.text }}
</v-card-text>
@@ -31,7 +29,6 @@ import ListMetadata from '@/components/tasks/metadata/ListMetadata'
import Seq2seqBox from '~/components/tasks/seq2seq/Seq2seqBox'
export default {
-
components: {
Seq2seqBox,
ListMetadata
@@ -52,15 +49,15 @@ export default {
}
],
meta: {
- "department.department_id": "INT",
- "department.name": "CHAR",
- "department.num_employee": "INT",
- "head.head_id": "INT",
- "head.name": "INT",
- "head.age": "INT",
- "management.department_id": "INT",
- "management.head_id": "INT",
- "management.temporary_acting": "VARCHAR"
+ 'department.department_id': 'INT',
+ 'department.name': 'CHAR',
+ 'department.num_employee': 'INT',
+ 'head.head_id': 'INT',
+ 'head.name': 'INT',
+ 'head.age': 'INT',
+ 'management.department_id': 'INT',
+ 'management.head_id': 'INT',
+ 'management.temporary_acting': 'VARCHAR'
},
annotation_approver: null
}
@@ -69,10 +66,12 @@ export default {
methods: {
_deleteAnnotation(annotationId) {
- this.currentDoc.annotations = this.currentDoc.annotations.filter(item => item.id !== annotationId)
+ this.currentDoc.annotations = this.currentDoc.annotations.filter(
+ (item) => item.id !== annotationId
+ )
},
_updateAnnotation(annotationId, text) {
- const index = this.currentDoc.annotations.findIndex(item => item.id === annotationId)
+ const index = this.currentDoc.annotations.findIndex((item) => item.id === annotationId)
this.currentDoc.annotations[index].text = text
},
_createAnnotation(text) {
diff --git a/frontend/pages/demo/translation/index.vue b/frontend/pages/demo/translation/index.vue
index cbc9e0e05d..e1645d3cd7 100644
--- a/frontend/pages/demo/translation/index.vue
+++ b/frontend/pages/demo/translation/index.vue
@@ -3,9 +3,7 @@
<v-container fluid>
<v-row justify="center">
<v-col cols="12" md="9">
- <v-card
- class="title mb-5"
- >
+ <v-card class="title mb-5">
<v-card-text class="title">
{{ currentDoc.text }}
</v-card-text>
@@ -31,7 +29,6 @@ import ListMetadata from '@/components/tasks/metadata/ListMetadata'
import Seq2seqBox from '~/components/tasks/seq2seq/Seq2seqBox'
export default {
-
components: {
Seq2seqBox,
ListMetadata
@@ -57,7 +54,7 @@ export default {
document: 8
}
],
- meta: { wikiPageId: 2},
+ meta: { wikiPageId: 2 },
annotation_approver: null
}
}
@@ -65,10 +62,12 @@ export default {
methods: {
_deleteAnnotation(annotationId) {
- this.currentDoc.annotations = this.currentDoc.annotations.filter(item => item.id !== annotationId)
+ this.currentDoc.annotations = this.currentDoc.annotations.filter(
+ (item) => item.id !== annotationId
+ )
},
_updateAnnotation(annotationId, text) {
- const index = this.currentDoc.annotations.findIndex(item => item.id === annotationId)
+ const index = this.currentDoc.annotations.findIndex((item) => item.id === annotationId)
this.currentDoc.annotations[index].text = text
},
_createAnnotation(text) {
diff --git a/frontend/pages/projects/_id/comments/index.vue b/frontend/pages/projects/_id/comments/index.vue
index 55fb9449e0..80214e3f3c 100644
--- a/frontend/pages/projects/_id/comments/index.vue
+++ b/frontend/pages/projects/_id/comments/index.vue
@@ -5,16 +5,12 @@
class="text-capitalize ms-2"
:disabled="!canDelete"
outlined
- @click.stop="dialogDelete=true"
+ @click.stop="dialogDelete = true"
>
{{ $t('generic.delete') }}
</v-btn>
<v-dialog v-model="dialogDelete">
- <form-delete
- :selected="selected"
- @cancel="dialogDelete=false"
- @remove="remove"
- />
+ <form-delete :selected="selected" @cancel="dialogDelete = false" @remove="remove" />
</v-dialog>
</v-card-title>
<comment-list
@@ -37,7 +33,6 @@ import { ProjectDTO } from '~/services/application/project/projectData'
import FormDelete from '~/components/comment/FormDelete.vue'
export default Vue.extend({
-
components: {
CommentList,
FormDelete
@@ -75,11 +70,10 @@ export default Vue.extend({
},
watch: {
- '$route.query': _.debounce(function() {
- // @ts-ignore
- this.$fetch()
- }, 1000
- ),
+ '$route.query': _.debounce(function () {
+ // @ts-ignore
+ this.$fetch()
+ }, 1000)
},
methods: {
diff --git a/frontend/pages/projects/_id/dataset/export.vue b/frontend/pages/projects/_id/dataset/export.vue
index 16785cfb1c..c9dffe5e2c 100644
--- a/frontend/pages/projects/_id/dataset/export.vue
+++ b/frontend/pages/projects/_id/dataset/export.vue
@@ -5,15 +5,9 @@
</v-card-title>
<v-card-text>
<v-overlay :value="isProcessing">
- <v-progress-circular
- indeterminate
- size="64"
- />
+ <v-progress-circular indeterminate size="64" />
</v-overlay>
- <v-form
- ref="form"
- v-model="valid"
- >
+ <v-form ref="form" v-model="valid">
<v-select
v-model="selectedFormat"
:items="formats"
@@ -31,19 +25,11 @@
>
<pre>{{ example }}</pre>
</v-sheet>
- <v-checkbox
- v-model="exportApproved"
- label="Export only approved documents"
- hide-details
- />
+ <v-checkbox v-model="exportApproved" label="Export only approved documents" hide-details />
</v-form>
</v-card-text>
<v-card-actions>
- <v-btn
- class='text-capitalize ms-2 primary'
- :disabled="!valid"
- @click="downloadRequest"
- >
+ <v-btn class="text-capitalize ms-2 primary" :disabled="!valid" @click="downloadRequest">
{{ $t('generic.export') }}
</v-btn>
</v-card-actions>
@@ -72,7 +58,7 @@ export default Vue.extend({
polling: null,
selectedFormat: null as any,
taskId: '',
- valid: false,
+ valid: false
}
},
@@ -93,12 +79,12 @@ export default Vue.extend({
beforeDestroy() {
// @ts-ignore
- clearInterval(this.polling)
+ clearInterval(this.polling)
},
methods: {
reset() {
- (this.$refs.form as HTMLFormElement).reset()
+ ;(this.$refs.form as HTMLFormElement).reset()
this.taskId = ''
this.exportApproved = false
this.selectedFormat = null
@@ -107,13 +93,17 @@ export default Vue.extend({
async downloadRequest() {
this.isProcessing = true
- this.taskId = await this.$services.download.request(this.projectId, this.selectedFormat, this.exportApproved)
+ this.taskId = await this.$services.download.request(
+ this.projectId,
+ this.selectedFormat,
+ this.exportApproved
+ )
this.pollData()
},
pollData() {
// @ts-ignore
- this.polling = setInterval(async() => {
+ this.polling = setInterval(async () => {
if (this.taskId) {
const res = await this.$services.taskStatus.get(this.taskId)
if (res.ready) {
@@ -121,8 +111,8 @@ export default Vue.extend({
this.reset()
}
}
- }, 1000)
- },
- }
+ }, 1000)
+ }
+ }
})
</script>
diff --git a/frontend/pages/projects/_id/dataset/import.vue b/frontend/pages/projects/_id/dataset/import.vue
index adf388c7cd..3aede52af6 100644
--- a/frontend/pages/projects/_id/dataset/import.vue
+++ b/frontend/pages/projects/_id/dataset/import.vue
@@ -5,10 +5,7 @@
</v-card-title>
<v-card-text>
<v-overlay :value="isImporting">
- <v-progress-circular
- indeterminate
- size="64"
- />
+ <v-progress-circular indeterminate size="64" />
</v-overlay>
<v-select
v-model="selected"
@@ -48,7 +45,7 @@
:light="$vuetify.theme.dark"
class="mb-5 pa-5"
>
- <pre>{{ example }}</pre>
+ <pre>{{ example }}</pre>
</v-sheet>
<file-pond
v-if="selected && acceptedFileTypes !== '*'"
@@ -81,11 +78,7 @@
></v-data-table>
</v-card-text>
<v-card-actions>
- <v-btn
- class='text-capitalize ms-2 primary'
- :disabled="isDisabled"
- @click="importDataset"
- >
+ <v-btn class="text-capitalize ms-2 primary" :disabled="isDisabled" @click="importDataset">
{{ $t('generic.import') }}
</v-btn>
</v-card-actions>
@@ -94,17 +87,14 @@
<script>
import Cookies from 'js-cookie'
-import vueFilePond from "vue-filepond"
-import "filepond/dist/filepond.min.css"
-import FilePondPluginFileValidateType from "filepond-plugin-file-validate-type"
-const FilePond = vueFilePond(
- FilePondPluginFileValidateType,
-)
+import vueFilePond from 'vue-filepond'
+import 'filepond/dist/filepond.min.css'
+import FilePondPluginFileValidateType from 'filepond-plugin-file-validate-type'
+const FilePond = vueFilePond(FilePondPluginFileValidateType)
export default {
-
components: {
- FilePond,
+ FilePond
},
layout: 'project',
@@ -112,13 +102,13 @@ export default {
validate({ params }) {
return /^\d+$/.test(params.id)
},
-
+
data() {
return {
catalog: [],
selected: null,
myFiles: [],
- option: {'column_data': '', 'column_label': '', 'delimiter': ''},
+ option: { column_data: '', column_label: '', delimiter: '' },
taskId: null,
polling: null,
errors: [],
@@ -127,17 +117,15 @@ export default {
{ text: 'Line', value: 'line' },
{ text: 'Message', value: 'message' }
],
- requiredRules: [
- v => !!v || 'Field value is required'
- ],
+ requiredRules: [(v) => !!v || 'Field value is required'],
server: {
url: '/v1/fp',
headers: {
- 'X-CSRFToken': Cookies.get('csrftoken'),
+ 'X-CSRFToken': Cookies.get('csrftoken')
},
process: {
url: '/process/',
- method: 'POST',
+ method: 'POST'
},
patch: '/patch/',
revert: '/revert/',
@@ -147,7 +135,7 @@ export default {
},
uploadedFiles: [],
valid: false,
- isImporting: false,
+ isImporting: false
}
},
@@ -156,7 +144,7 @@ export default {
return this.uploadedFiles.length === 0 || this.taskId !== null || !this.valid
},
properties() {
- const item = this.catalog.find(item => item.displayName === this.selected)
+ const item = this.catalog.find((item) => item.displayName === this.selected)
if (item) {
return item.properties
} else {
@@ -174,7 +162,7 @@ export default {
return Object.fromEntries(textFields)
},
acceptedFileTypes() {
- const item = this.catalog.find(item => item.displayName === this.selected)
+ const item = this.catalog.find((item) => item.displayName === this.selected)
if (item) {
return item.acceptTypes
} else {
@@ -182,14 +170,15 @@ export default {
}
},
example() {
- const item = this.catalog.find(item => item.displayName === this.selected)
+ const item = this.catalog.find((item) => item.displayName === this.selected)
if (item) {
const column_data = 'column_data'
const column_label = 'column_label'
if (column_data in this.option && column_label in this.option) {
- return item.example.replaceAll(column_data, this.option[column_data])
- .replaceAll(column_label, this.option[column_label])
- .trim()
+ return item.example
+ .replaceAll(column_data, this.option[column_data])
+ .replaceAll(column_label, this.option[column_label])
+ .trim()
} else {
return item.example.trim()
}
@@ -201,7 +190,7 @@ export default {
watch: {
selected() {
- const item = this.catalog.find(item => item.displayName === this.selected)
+ const item = this.catalog.find((item) => item.displayName === this.selected)
for (const [key, value] of Object.entries(item.properties)) {
this.option[key] = value.default
}
@@ -220,7 +209,7 @@ export default {
},
beforeDestroy() {
- clearInterval(this.polling)
+ clearInterval(this.polling)
},
methods: {
@@ -231,25 +220,25 @@ export default {
},
handleFilePondRemoveFile(error, file) {
console.log(error)
- const index = this.uploadedFiles.findIndex(item => item.id === file.id)
+ const index = this.uploadedFiles.findIndex((item) => item.id === file.id)
if (index > -1) {
- this.uploadedFiles.splice(index, 1)
- this.$nextTick()
+ this.uploadedFiles.splice(index, 1)
+ this.$nextTick()
}
},
async importDataset() {
this.isImporting = true
- const item = this.catalog.find(item => item.displayName === this.selected)
+ const item = this.catalog.find((item) => item.displayName === this.selected)
this.taskId = await this.$services.parse.analyze(
this.$route.params.id,
item.name,
item.taskId,
- this.uploadedFiles.map(item => item.serverId),
+ this.uploadedFiles.map((item) => item.serverId),
this.option
)
},
pollData() {
- this.polling = setInterval(async() => {
+ this.polling = setInterval(async () => {
if (this.taskId) {
const res = await this.$services.taskStatus.get(this.taskId)
if (res.ready) {
@@ -263,8 +252,8 @@ export default {
}
}
}
- }, 3000)
- },
+ }, 3000)
+ },
toVisualize(text) {
if (text === '\t') {
return 'Tab'
@@ -276,6 +265,6 @@ export default {
return text
}
}
- },
-};
+ }
+}
</script>
diff --git a/frontend/pages/projects/_id/dataset/index.vue b/frontend/pages/projects/_id/dataset/index.vue
index 6da3d34002..49157ed627 100644
--- a/frontend/pages/projects/_id/dataset/index.vue
+++ b/frontend/pages/projects/_id/dataset/index.vue
@@ -9,7 +9,7 @@
class="text-capitalize ms-2"
:disabled="!canDelete"
outlined
- @click.stop="dialogDelete=true"
+ @click.stop="dialogDelete = true"
>
{{ $t('generic.delete') }}
</v-btn>
@@ -18,7 +18,7 @@
:disabled="!item.count"
class="text-capitalize"
color="error"
- @click="dialogDeleteAll=true"
+ @click="dialogDeleteAll = true"
>
{{ $t('generic.deleteAll') }}
</v-btn>
@@ -26,15 +26,12 @@
<form-delete
:selected="selected"
:item-key="itemKey"
- @cancel="dialogDelete=false"
+ @cancel="dialogDelete = false"
@remove="remove"
/>
</v-dialog>
<v-dialog v-model="dialogDeleteAll">
- <form-delete-bulk
- @cancel="dialogDeleteAll=false"
- @remove="removeAll"
- />
+ <form-delete-bulk @cancel="dialogDeleteAll = false" @remove="removeAll" />
</v-dialog>
</v-card-title>
<image-list
@@ -80,14 +77,13 @@ import ActionMenu from '~/components/example/ActionMenu.vue'
import { ProjectDTO } from '~/services/application/project/projectData'
export default Vue.extend({
-
components: {
ActionMenu,
AudioList,
DocumentList,
ImageList,
FormDelete,
- FormDeleteBulk,
+ FormDeleteBulk
},
layout: 'project',
@@ -134,15 +130,14 @@ export default Vue.extend({
} else {
return 'text'
}
- },
+ }
},
watch: {
- '$route.query': _.debounce(function() {
- // @ts-ignore
- this.$fetch()
- }, 1000
- ),
+ '$route.query': _.debounce(function () {
+ // @ts-ignore
+ this.$fetch()
+ }, 1000)
},
async created() {
diff --git a/frontend/pages/projects/_id/guideline/index.vue b/frontend/pages/projects/_id/guideline/index.vue
index 80490df54d..c1044723fe 100644
--- a/frontend/pages/projects/_id/guideline/index.vue
+++ b/frontend/pages/projects/_id/guideline/index.vue
@@ -18,7 +18,6 @@ import { Editor } from '@toast-ui/vue-editor'
import '@/assets/style/editor.css'
export default {
-
components: {
Editor
},
@@ -35,7 +34,7 @@ export default {
language: this.$t('toastui.localeCode')
},
project: {},
- mounted: false,
+ mounted: false
}
},
@@ -47,7 +46,7 @@ export default {
},
methods: {
- updateProject: _.debounce(function() {
+ updateProject: _.debounce(function () {
if (this.mounted) {
this.project.guideline = this.$refs.toastuiEditor.invoke('getMarkdown')
this.$services.project.update(this.project)
@@ -58,7 +57,8 @@ export default {
</script>
<style>
-.te-md-container .CodeMirror, .tui-editor-contents {
+.te-md-container .CodeMirror,
+.tui-editor-contents {
font-size: 20px;
}
</style>
diff --git a/frontend/pages/projects/_id/image-classification/index.vue b/frontend/pages/projects/_id/image-classification/index.vue
index 0b21159e21..3bce0f3484 100644
--- a/frontend/pages/projects/_id/image-classification/index.vue
+++ b/frontend/pages/projects/_id/image-classification/index.vue
@@ -11,11 +11,7 @@
@click:clear-label="clear"
@click:review="confirm"
>
- <v-btn-toggle
- v-model="labelOption"
- mandatory
- class="ms-2"
- >
+ <v-btn-toggle v-model="labelOption" mandatory class="ms-2">
<v-btn icon>
<v-icon>{{ mdiFormatListBulleted }}</v-icon>
</v-btn>
@@ -24,16 +20,10 @@
</v-btn>
</v-btn-toggle>
</toolbar-laptop>
- <toolbar-mobile
- :total="images.count"
- class="d-flex d-sm-none"
- />
+ <toolbar-mobile :total="images.count" class="d-flex d-sm-none" />
</template>
<template #content>
- <v-card
- v-shortkey="shortKeys"
- @shortkey="addOrRemove"
- >
+ <v-card v-shortkey="shortKeys" @shortkey="addOrRemove">
<v-card-title>
<label-group
v-if="labelOption === 0"
@@ -53,12 +43,7 @@
/>
</v-card-title>
<v-divider />
- <v-img
- contain
- :src="image.fileUrl"
- :max-height="imageSize.height"
- class="grey lighten-2"
- />
+ <v-img contain :src="image.fileUrl" :max-height="imageSize.height" class="grey lighten-2" />
</v-card>
</template>
<template #sidebar>
@@ -82,7 +67,6 @@ import { useLabelList } from '@/composables/useLabelList'
import AnnotationProgress from '@/components/tasks/sidebar/AnnotationProgress.vue'
export default {
-
components: {
AnnotationProgress,
LabelGroup,
@@ -105,7 +89,7 @@ export default {
return {
...toRefs(state),
getLabelList,
- shortKeys,
+ shortKeys
}
},
@@ -186,7 +170,7 @@ export default {
async addOrRemove(event) {
const labelId = parseInt(event.srcKey, 10)
- const annotation = this.annotations.find(item => item.label === labelId)
+ const annotation = this.annotations.find((item) => item.label === labelId)
if (annotation) {
await this.remove(annotation.id)
} else {
@@ -220,7 +204,7 @@ export default {
setImageSize(val) {
const img = new Image()
const self = this
- img.onload = function() {
+ img.onload = function () {
self.imageSize.height = this.height
self.imageSize.width = this.width
}
diff --git a/frontend/pages/projects/_id/index.vue b/frontend/pages/projects/_id/index.vue
index a6572d1559..1049886957 100644
--- a/frontend/pages/projects/_id/index.vue
+++ b/frontend/pages/projects/_id/index.vue
@@ -3,45 +3,19 @@
<v-card-title>
{{ $t('projectHome.welcome') }}
</v-card-title>
- <v-stepper
- v-model="e6"
- vertical
- non-linear
- >
- <div
- v-for="(item, index) in items"
- :key="index"
- >
- <v-stepper-step
- :complete="e6 > index + 1"
- :step="index + 1"
- editable
- >
+ <v-stepper v-model="e6" vertical non-linear>
+ <div v-for="(item, index) in items" :key="index">
+ <v-stepper-step :complete="e6 > index + 1" :step="index + 1" editable>
{{ item.title }}
</v-stepper-step>
<v-stepper-content :step="index + 1">
- <v-card
- v-if="e6 === index + 1"
- class="mb-12"
- width="560"
- height="315"
- >
- <youtube
- ref="youtube"
- :video-id="item.videoId"
- />
+ <v-card v-if="e6 === index + 1" class="mb-12" width="560" height="315">
+ <youtube ref="youtube" :video-id="item.videoId" />
</v-card>
- <v-btn
- color="primary mt-5"
- @click="next"
- >
+ <v-btn color="primary mt-5" @click="next">
{{ $t('generic.continue') }}
</v-btn>
- <v-btn
- class="mt-5"
- text
- @click="prev"
- >
+ <v-btn class="mt-5" text @click="prev">
{{ $t('generic.cancel') }}
</v-btn>
</v-stepper-content>
@@ -65,9 +39,18 @@ export default {
{ title: this.$t('projectHome.importData'), videoId: 'dA4ID1DSxCE' },
{ title: this.$t('projectHome.createLabels'), videoId: '1bSML270quU' },
{ title: this.$t('projectHome.addMembers'), videoId: 'NI09dcBz-qA' },
- { title: this.$t('projectHome.defineGuideline'), videoId: 'AvvX3Xs32nA' },
- { title: this.$t('projectHome.annotateDataset'), videoId: 'F3XoSdyiMhA' },
- { title: this.$t('projectHome.viewStatistics'), videoId: 'kfRpa0mNQMY' },
+ {
+ title: this.$t('projectHome.defineGuideline'),
+ videoId: 'AvvX3Xs32nA'
+ },
+ {
+ title: this.$t('projectHome.annotateDataset'),
+ videoId: 'F3XoSdyiMhA'
+ },
+ {
+ title: this.$t('projectHome.viewStatistics'),
+ videoId: 'kfRpa0mNQMY'
+ },
{ title: this.$t('projectHome.exportDataset'), videoId: 'Pfy_QcHEeQ4' }
]
}
diff --git a/frontend/pages/projects/_id/intent-detection-and-slot-filling/index.vue b/frontend/pages/projects/_id/intent-detection-and-slot-filling/index.vue
index 25904e2044..ed3236aef5 100644
--- a/frontend/pages/projects/_id/intent-detection-and-slot-filling/index.vue
+++ b/frontend/pages/projects/_id/intent-detection-and-slot-filling/index.vue
@@ -1,4 +1,3 @@
-
<template>
<layout-text v-if="doc.id">
<template #header>
@@ -12,10 +11,7 @@
@click:clear-label="clear"
@click:review="confirm"
/>
- <toolbar-mobile
- :total="docs.count"
- class="d-flex d-sm-none"
- />
+ <toolbar-mobile :total="docs.count" class="d-flex d-sm-none" />
</template>
<template #content>
<v-card>
@@ -132,8 +128,8 @@ export default {
methods: {
async listSpan(docId) {
- const spans = await this.$services.sequenceLabeling.list(this.projectId, docId);
- this.spans = spans;
+ const spans = await this.$services.sequenceLabeling.list(this.projectId, docId)
+ this.spans = spans
},
async deleteSpan(id) {
@@ -142,12 +138,23 @@ export default {
},
async addSpan(startOffset, endOffset, labelId) {
- await this.$services.sequenceLabeling.create(this.projectId, this.doc.id, labelId, startOffset, endOffset)
+ await this.$services.sequenceLabeling.create(
+ this.projectId,
+ this.doc.id,
+ labelId,
+ startOffset,
+ endOffset
+ )
await this.listSpan(this.doc.id)
},
async updateSpan(annotationId, labelId) {
- await this.$services.sequenceLabeling.changeLabel(this.projectId, this.doc.id, annotationId, labelId)
+ await this.$services.sequenceLabeling.changeLabel(
+ this.projectId,
+ this.doc.id,
+ annotationId,
+ labelId
+ )
await this.listSpan(this.doc.id)
},
@@ -187,7 +194,7 @@ export default {
font-size: 1.25rem !important;
font-weight: 500;
line-height: 2rem;
- font-family: "Roboto", sans-serif !important;
+ font-family: 'Roboto', sans-serif !important;
opacity: 0.6;
}
</style>
diff --git a/frontend/pages/projects/_id/labels/_label_id/edit.vue b/frontend/pages/projects/_id/labels/_label_id/edit.vue
index fe1810e9f9..24506168d3 100644
--- a/frontend/pages/projects/_id/labels/_label_id/edit.vue
+++ b/frontend/pages/projects/_id/labels/_label_id/edit.vue
@@ -1,15 +1,6 @@
<template>
- <form-create
- v-slot="slotProps"
- v-bind.sync="editedItem"
- :items="items"
- >
- <v-btn
- :disabled="!slotProps.valid"
- color="primary"
- class="text-capitalize"
- @click="save"
- >
+ <form-create v-slot="slotProps" v-bind.sync="editedItem" :items="items">
+ <v-btn :disabled="!slotProps.valid" color="primary" class="text-capitalize" @click="save">
Save
</v-btn>
</form-create>
@@ -23,18 +14,17 @@ import FormCreate from '~/components/label/FormCreate.vue'
export default Vue.extend({
components: {
- FormCreate,
+ FormCreate
},
layout: 'project',
validate({ params, query, app }) {
- if (!['category', 'span', 'relation'].includes((query.type as string))) {
+ if (!['category', 'span', 'relation'].includes(query.type as string)) {
return false
}
if (/^\d+$/.test(params.id)) {
- return app.$services.project.findById(params.id)
- .then((res:ProjectDTO) => {
+ return app.$services.project.findById(params.id).then((res: ProjectDTO) => {
return res.canDefineLabel
})
}
@@ -67,12 +57,12 @@ export default Vue.extend({
const type = this.$route.query.type
if (type === 'category') {
return this.$services.categoryType
- } else if (type === 'span'){
+ } else if (type === 'span') {
return this.$services.spanType
} else {
return this.$services.relationType
}
- },
+ }
},
async created() {
diff --git a/frontend/pages/projects/_id/labels/add.vue b/frontend/pages/projects/_id/labels/add.vue
index 223f56b76b..e89eacced8 100644
--- a/frontend/pages/projects/_id/labels/add.vue
+++ b/frontend/pages/projects/_id/labels/add.vue
@@ -1,15 +1,6 @@
<template>
- <form-create
- v-slot="slotProps"
- v-bind.sync="editedItem"
- :items="items"
- >
- <v-btn
- :disabled="!slotProps.valid"
- color="primary"
- class="text-capitalize"
- @click="save"
- >
+ <form-create v-slot="slotProps" v-bind.sync="editedItem" :items="items">
+ <v-btn :disabled="!slotProps.valid" color="primary" class="text-capitalize" @click="save">
Save
</v-btn>
@@ -33,18 +24,17 @@ import FormCreate from '~/components/label/FormCreate.vue'
export default Vue.extend({
components: {
- FormCreate,
+ FormCreate
},
layout: 'project',
validate({ params, query, app }) {
- if (!['category', 'span', 'relation'].includes((query.type as string))) {
+ if (!['category', 'span', 'relation'].includes(query.type as string)) {
return false
}
if (/^\d+$/.test(params.id)) {
- return app.$services.project.findById(params.id)
- .then((res:ProjectDTO) => {
+ return app.$services.project.findById(params.id).then((res: ProjectDTO) => {
return res.canDefineLabel
})
}
@@ -85,7 +75,7 @@ export default Vue.extend({
} else {
return this.$services.relationType
}
- },
+ }
},
async created() {
diff --git a/frontend/pages/projects/_id/labels/import.vue b/frontend/pages/projects/_id/labels/import.vue
index 162f0c5d74..930f349d41 100644
--- a/frontend/pages/projects/_id/labels/import.vue
+++ b/frontend/pages/projects/_id/labels/import.vue
@@ -1,9 +1,5 @@
<template>
- <form-import
- :error-message="errorMessage"
- @clear="clearErrorMessage"
- @upload="upload"
- />
+ <form-import :error-message="errorMessage" @clear="clearErrorMessage" @upload="upload" />
</template>
<script lang="ts">
@@ -13,18 +9,17 @@ import FormImport from '~/components/label/FormImport.vue'
export default Vue.extend({
components: {
- FormImport,
+ FormImport
},
layout: 'project',
validate({ params, query, app }) {
- if (!['category', 'span', 'relation'].includes((query.type as string))) {
+ if (!['category', 'span', 'relation'].includes(query.type as string)) {
return false
}
if (/^\d+$/.test(params.id)) {
- return app.$services.project.findById(params.id)
- .then((res:ProjectDTO) => {
+ return app.$services.project.findById(params.id).then((res: ProjectDTO) => {
return res.canDefineLabel
})
}
@@ -33,7 +28,7 @@ export default Vue.extend({
data() {
return {
- errorMessage: '',
+ errorMessage: ''
}
},
@@ -51,7 +46,7 @@ export default Vue.extend({
} else {
return this.$services.relationType
}
- },
+ }
},
methods: {
@@ -59,7 +54,7 @@ export default Vue.extend({
try {
await this.service.upload(this.projectId, file)
this.$router.push(`/projects/${this.projectId}/labels`)
- } catch(e) {
+ } catch (e) {
this.errorMessage = e.message
}
},
@@ -67,6 +62,6 @@ export default Vue.extend({
clearErrorMessage() {
this.errorMessage = ''
}
- },
+ }
})
</script>
diff --git a/frontend/pages/projects/_id/labels/index.vue b/frontend/pages/projects/_id/labels/index.vue
index fc572677f1..238b378391 100644
--- a/frontend/pages/projects/_id/labels/index.vue
+++ b/frontend/pages/projects/_id/labels/index.vue
@@ -20,24 +20,15 @@
class="text-capitalize ms-2"
:disabled="!canDelete"
outlined
- @click.stop="dialogDelete=true"
+ @click.stop="dialogDelete = true"
>
{{ $t('generic.delete') }}
</v-btn>
<v-dialog v-model="dialogDelete">
- <form-delete
- :selected="selected"
- @cancel="dialogDelete=false"
- @remove="remove"
- />
+ <form-delete :selected="selected" @cancel="dialogDelete = false" @remove="remove" />
</v-dialog>
</v-card-title>
- <label-list
- v-model="selected"
- :items="items"
- :is-loading="isLoading"
- @edit="editItem"
- />
+ <label-list v-model="selected" :items="items" :is-loading="isLoading" @edit="editItem" />
</v-card>
</template>
@@ -50,7 +41,6 @@ import { LabelDTO } from '~/services/application/label/labelData'
import { ProjectDTO } from '~/services/application/project/projectData'
export default Vue.extend({
-
components: {
ActionMenu,
FormDelete,
@@ -60,8 +50,7 @@ export default Vue.extend({
validate({ params, app }) {
if (/^\d+$/.test(params.id)) {
- return app.$services.project.findById(params.id)
- .then((res:ProjectDTO) => {
+ return app.$services.project.findById(params.id).then((res: ProjectDTO) => {
return res.canDefineLabel
})
}
@@ -75,7 +64,7 @@ export default Vue.extend({
selected: [] as LabelDTO[],
isLoading: false,
tab: 0,
- project: {} as ProjectDTO,
+ project: {} as ProjectDTO
}
},
@@ -102,7 +91,7 @@ export default Vue.extend({
labelType(): string {
if (this.hasMultiType) {
- if (this.isIntentDetectionAndSlotFilling){
+ if (this.isIntentDetectionAndSlotFilling) {
return ['category', 'span'][this.tab!]
} else {
return ['span', 'relation'][this.tab!]
@@ -149,7 +138,7 @@ export default Vue.extend({
this.items = await this.service.list(this.projectId)
this.isLoading = false
},
-
+
async remove() {
await this.service.bulkDelete(this.projectId, this.selected)
this.list()
diff --git a/frontend/pages/projects/_id/members/index.vue b/frontend/pages/projects/_id/members/index.vue
index d3521ac506..62879f6e07 100644
--- a/frontend/pages/projects/_id/members/index.vue
+++ b/frontend/pages/projects/_id/members/index.vue
@@ -1,18 +1,14 @@
<template>
<v-card>
<v-card-title>
- <v-btn
- class="text-capitalize"
- color="primary"
- @click.stop="dialogCreate=true"
- >
+ <v-btn class="text-capitalize" color="primary" @click.stop="dialogCreate = true">
{{ $t('generic.add') }}
</v-btn>
<v-btn
class="text-capitalize ms-2"
:disabled="!canDelete"
outlined
- @click.stop="dialogDelete=true"
+ @click.stop="dialogDelete = true"
>
{{ $t('generic.delete') }}
</v-btn>
@@ -25,19 +21,10 @@
/>
</v-dialog>
<v-dialog v-model="dialogDelete">
- <form-delete
- :selected="selected"
- @cancel="dialogDelete=false"
- @remove="remove"
- />
+ <form-delete :selected="selected" @cancel="dialogDelete = false" @remove="remove" />
</v-dialog>
</v-card-title>
- <member-list
- v-model="selected"
- :items="items"
- :is-loading="isLoading"
- @edit="editItem"
- />
+ <member-list v-model="selected" :items="items" :is-loading="isLoading" @edit="editItem" />
</v-card>
</template>
@@ -49,7 +36,6 @@ import FormCreate from '~/components/member/FormCreate.vue'
import { MemberDTO } from '~/services/application/member/memberData'
export default Vue.extend({
-
components: {
MemberList,
FormCreate,
@@ -89,7 +75,7 @@ export default Vue.extend({
this.isLoading = true
try {
this.items = await this.$services.member.list(this.projectId)
- } catch(e) {
+ } catch (e) {
this.$router.push(`/projects/${this.projectId}`)
} finally {
this.isLoading = false
@@ -111,7 +97,7 @@ export default Vue.extend({
await this.$services.member.create(this.projectId, this.editedItem)
this.close()
this.$fetch()
- } catch(e) {
+ } catch (e) {
this.errorMessage = e.message
}
},
@@ -121,7 +107,7 @@ export default Vue.extend({
await this.$services.member.update(this.projectId, this.editedItem)
this.close()
this.$fetch()
- } catch(e) {
+ } catch (e) {
this.errorMessage = e.message
}
},
diff --git a/frontend/pages/projects/_id/metrics/index.vue b/frontend/pages/projects/_id/metrics/index.vue
index 3e5aaeff05..091b1e2221 100644
--- a/frontend/pages/projects/_id/metrics/index.vue
+++ b/frontend/pages/projects/_id/metrics/index.vue
@@ -34,9 +34,9 @@ import MemberProgress from '~/components/metrics/MemberProgress'
export default {
components: {
LabelDistribution,
- MemberProgress,
+ MemberProgress
},
-
+
layout: 'project',
validate({ params }) {
@@ -51,7 +51,7 @@ export default {
relationTypes: [],
relationDistribution: {},
spanTypes: [],
- spanDistribution: {},
+ spanDistribution: {}
}
},
@@ -65,7 +65,9 @@ export default {
this.project = await this.$services.project.findById(this.projectId)
if (this.project.hasCategory) {
this.categoryTypes = await this.$services.categoryType.list(this.projectId)
- this.categoryDistribution = await this.$services.metrics.fetchCategoryDistribution(this.projectId)
+ this.categoryDistribution = await this.$services.metrics.fetchCategoryDistribution(
+ this.projectId
+ )
}
if (this.project.hasSpan) {
this.spanTypes = await this.$services.spanType.list(this.projectId)
@@ -73,7 +75,9 @@ export default {
}
if (this.project.useRelation) {
this.relationTypes = await this.$services.relationType.list(this.projectId)
- this.relationDistribution = await this.$services.metrics.fetchRelationDistribution(this.projectId)
+ this.relationDistribution = await this.$services.metrics.fetchRelationDistribution(
+ this.projectId
+ )
}
}
}
diff --git a/frontend/pages/projects/_id/sequence-labeling/index.vue b/frontend/pages/projects/_id/sequence-labeling/index.vue
index b673b40262..37ecd1357f 100644
--- a/frontend/pages/projects/_id/sequence-labeling/index.vue
+++ b/frontend/pages/projects/_id/sequence-labeling/index.vue
@@ -11,10 +11,7 @@
@click:clear-label="clear"
@click:review="confirm"
/>
- <toolbar-mobile
- :total="docs.count"
- class="d-flex d-sm-none"
- />
+ <toolbar-mobile :total="docs.count" class="d-flex d-sm-none" />
</template>
<template #content>
<v-card>
@@ -46,19 +43,13 @@
<v-card class="mt-4">
<v-card-title>Label Types</v-card-title>
<v-card-text>
- <v-switch
- v-if="useRelationLabeling"
- v-model="relationMode"
- >
+ <v-switch v-if="useRelationLabeling" v-model="relationMode">
<template #label>
<span v-if="relationMode">Relation</span>
<span v-else>Span</span>
</template>
</v-switch>
- <v-chip-group
- v-model="selectedLabelIndex"
- column
- >
+ <v-chip-group v-model="selectedLabelIndex" column>
<v-chip
v-for="(item, index) in labelTypes"
:key="item.id"
@@ -97,7 +88,6 @@ import EntityEditor from '@/components/tasks/sequenceLabeling/EntityEditor.vue'
import AnnotationProgress from '@/components/tasks/sidebar/AnnotationProgress.vue'
export default {
-
components: {
AnnotationProgress,
EntityEditor,
@@ -125,7 +115,7 @@ export default {
rtl: false,
selectedLabelIndex: null,
progress: {},
- relationMode: false,
+ relationMode: false
}
},
@@ -148,7 +138,7 @@ export default {
...mapGetters('config', ['isRTL']),
shortKeys() {
- return Object.fromEntries(this.spanTypes.map(item => [item.id, [item.suffixKey]]))
+ return Object.fromEntries(this.spanTypes.map((item) => [item.id, [item.suffixKey]]))
},
projectId() {
@@ -206,17 +196,18 @@ export default {
methods: {
async maybeFetchSpanTypes(annotations) {
- const labelIds = new Set(this.spanTypes.map((label) => label.id));
+ const labelIds = new Set(this.spanTypes.map((label) => label.id))
if (annotations.some((item) => !labelIds.has(item.label))) {
- this.spanTypes = await this.$services.spanType.list(this.projectId);
+ this.spanTypes = await this.$services.spanType.list(this.projectId)
}
},
async list(docId) {
const annotations = await this.$services.sequenceLabeling.list(this.projectId, docId)
const relations = await this.$services.sequenceLabeling.listRelations(this.projectId, docId)
- // In colab mode, if someone add a new label and annotate data with the label during your work,
- // it occurs exception because there is no corresponding label.
+ // In colab mode, if someone add a new label and annotate data
+ // with the label during your work, it occurs exception
+ // because there is no corresponding label.
await this.maybeFetchSpanTypes(annotations)
this.annotations = annotations
this.relations = relations
@@ -228,22 +219,44 @@ export default {
},
async addSpan(startOffset, endOffset, labelId) {
- await this.$services.sequenceLabeling.create(this.projectId, this.doc.id, labelId, startOffset, endOffset)
+ await this.$services.sequenceLabeling.create(
+ this.projectId,
+ this.doc.id,
+ labelId,
+ startOffset,
+ endOffset
+ )
await this.list(this.doc.id)
},
async updateSpan(annotationId, labelId) {
- await this.$services.sequenceLabeling.changeLabel(this.projectId, this.doc.id, annotationId, labelId)
+ await this.$services.sequenceLabeling.changeLabel(
+ this.projectId,
+ this.doc.id,
+ annotationId,
+ labelId
+ )
await this.list(this.doc.id)
},
async addRelation(fromId, toId, typeId) {
- await this.$services.sequenceLabeling.createRelation(this.projectId, this.doc.id, fromId, toId, typeId)
+ await this.$services.sequenceLabeling.createRelation(
+ this.projectId,
+ this.doc.id,
+ fromId,
+ toId,
+ typeId
+ )
await this.list(this.doc.id)
},
async updateRelation(relationId, typeId) {
- await this.$services.sequenceLabeling.updateRelation(this.projectId, this.doc.id, relationId, typeId)
+ await this.$services.sequenceLabeling.updateRelation(
+ this.projectId,
+ this.doc.id,
+ relationId,
+ typeId
+ )
await this.list(this.doc.id)
},
@@ -287,7 +300,7 @@ export default {
font-size: 1.25rem !important;
font-weight: 500;
line-height: 2rem;
- font-family: "Roboto", sans-serif !important;
+ font-family: 'Roboto', sans-serif !important;
opacity: 0.6;
}
</style>
diff --git a/frontend/pages/projects/_id/sequence-to-sequence/index.vue b/frontend/pages/projects/_id/sequence-to-sequence/index.vue
index 957a606e13..a68e4ad3b0 100644
--- a/frontend/pages/projects/_id/sequence-to-sequence/index.vue
+++ b/frontend/pages/projects/_id/sequence-to-sequence/index.vue
@@ -11,10 +11,7 @@
@click:clear-label="clear"
@click:review="confirm"
/>
- <toolbar-mobile
- :total="docs.count"
- class="d-flex d-sm-none"
- />
+ <toolbar-mobile :total="docs.count" class="d-flex d-sm-none" />
</template>
<template #content>
<v-card class="mb-5">
@@ -45,7 +42,6 @@ import AnnotationProgress from '@/components/tasks/sidebar/AnnotationProgress.vu
import Seq2seqBox from '~/components/tasks/seq2seq/Seq2seqBox'
export default {
-
components: {
AnnotationProgress,
LayoutText,
diff --git a/frontend/pages/projects/_id/settings/index.vue b/frontend/pages/projects/_id/settings/index.vue
index 00c909b5d3..b0bc352780 100644
--- a/frontend/pages/projects/_id/settings/index.vue
+++ b/frontend/pages/projects/_id/settings/index.vue
@@ -1,15 +1,9 @@
<template>
<v-card>
- <v-tabs
- v-model="tab"
- >
+ <v-tabs v-model="tab">
<v-tabs-slider color="primary" />
- <v-tab href="#tab-project" class="text-capitalize">
- Project
- </v-tab>
- <v-tab href="#tab-auto-labeling" class="text-capitalize">
- Auto Labeling
- </v-tab>
+ <v-tab href="#tab-project" class="text-capitalize"> Project </v-tab>
+ <v-tab href="#tab-auto-labeling" class="text-capitalize"> Auto Labeling </v-tab>
</v-tabs>
<v-divider />
@@ -30,7 +24,6 @@ import FormUpdate from '@/components/project/FormUpdate.vue'
import ConfigList from '@/components/configAutoLabeling/ConfigList.vue'
export default Vue.extend({
-
components: {
ConfigList,
FormUpdate
diff --git a/frontend/pages/projects/_id/speech-to-text/index.vue b/frontend/pages/projects/_id/speech-to-text/index.vue
index 0d5c3a418c..937326c919 100644
--- a/frontend/pages/projects/_id/speech-to-text/index.vue
+++ b/frontend/pages/projects/_id/speech-to-text/index.vue
@@ -11,22 +11,13 @@
@click:clear-label="clear"
@click:review="confirm"
/>
- <toolbar-mobile
- :total="items.count"
- class="d-flex d-sm-none"
- />
+ <toolbar-mobile :total="items.count" class="d-flex d-sm-none" />
</template>
<template #content>
<v-overlay :value="isLoading">
- <v-progress-circular
- indeterminate
- size="64"
- />
+ <v-progress-circular indeterminate size="64" />
</v-overlay>
- <audio-viewer
- :source="item.fileUrl"
- class="mb-5"
- />
+ <audio-viewer :source="item.fileUrl" class="mb-5" />
<seq2seq-box
:text="item.text"
:annotations="annotations"
@@ -53,7 +44,6 @@ import Seq2seqBox from '~/components/tasks/seq2seq/Seq2seqBox'
import AudioViewer from '~/components/tasks/audio/AudioViewer'
export default {
-
components: {
AnnotationProgress,
AudioViewer,
diff --git a/frontend/pages/projects/_id/text-classification/index.vue b/frontend/pages/projects/_id/text-classification/index.vue
index 81636cb9a2..b8bd49ed23 100644
--- a/frontend/pages/projects/_id/text-classification/index.vue
+++ b/frontend/pages/projects/_id/text-classification/index.vue
@@ -11,15 +11,9 @@
@click:clear-label="clearTeacherList(project.id, example.id)"
@click:review="confirm(project.id)"
>
- <button-label-switch
- class="ms-2"
- @change="labelComponent=$event"
- />
+ <button-label-switch class="ms-2" @change="labelComponent = $event" />
</toolbar-laptop>
- <toolbar-mobile
- :total="totalExample"
- class="d-flex d-sm-none"
- />
+ <toolbar-mobile :total="totalExample" class="d-flex d-sm-none" />
</template>
<template #content>
<v-card
@@ -37,11 +31,7 @@
/>
</v-card-title>
<v-divider />
- <v-card-text
- class="title highlight"
- style="white-space: pre-wrap;"
- v-text="example.text"
- />
+ <v-card-text class="title highlight" style="white-space: pre-wrap" v-text="example.text" />
</v-card>
</template>
<template #sidebar>
@@ -67,7 +57,6 @@ import { useTeacherList } from '@/composables/useTeacherList'
import AnnotationProgress from '@/components/tasks/sidebar/AnnotationProgress.vue'
export default {
-
components: {
AnnotationProgress,
ButtonLabelSwitch,
@@ -106,15 +95,12 @@ export default {
getProjectById(projectId)
updateProgress(projectId)
- const { fetch } = useFetch(async() => {
- await getExample(
- projectId,
- query.value
- )
+ const { fetch } = useFetch(async () => {
+ await getExample(projectId, query.value)
if (enableAutoLabeling.value) {
try {
await autoLabel(projectId, exampleState.example.id)
- } catch(e) {
+ } catch (e) {
enableAutoLabeling.value = false
alert(e.response.data.detail)
}
@@ -136,7 +122,7 @@ export default {
enableAutoLabeling,
labelComponent,
removeTeacher,
- shortKeys,
+ shortKeys
}
}
}
diff --git a/frontend/pages/projects/create.vue b/frontend/pages/projects/create.vue
index e8c6a0ab55..0bda529cba 100644
--- a/frontend/pages/projects/create.vue
+++ b/frontend/pages/projects/create.vue
@@ -1,8 +1,5 @@
<template>
- <form-create
- v-bind.sync="editedItem"
- @save="create"
- />
+ <form-create v-bind.sync="editedItem" @save="create" />
</template>
<script lang="ts">
@@ -12,7 +9,7 @@ import { ProjectWriteDTO } from '~/services/application/project/projectData'
export default Vue.extend({
components: {
- FormCreate,
+ FormCreate
},
layout: 'projects',
@@ -31,7 +28,7 @@ export default Vue.extend({
allowOverlapping: false,
graphemeMode: false,
useRelation: false,
- tags: [] as string[],
+ tags: [] as string[]
} as ProjectWriteDTO,
defaultItem: {
name: '',
@@ -43,8 +40,8 @@ export default Vue.extend({
allowOverlapping: false,
graphemeMode: false,
useRelation: false,
- tags: [] as string[],
- } as ProjectWriteDTO,
+ tags: [] as string[]
+ } as ProjectWriteDTO
}
},
@@ -55,7 +52,7 @@ export default Vue.extend({
this.$nextTick(() => {
this.editedItem = Object.assign({}, this.defaultItem)
})
- },
+ }
}
})
</script>
diff --git a/frontend/pages/projects/index.vue b/frontend/pages/projects/index.vue
index 4c21ceb594..0bccdd321e 100644
--- a/frontend/pages/projects/index.vue
+++ b/frontend/pages/projects/index.vue
@@ -1,27 +1,19 @@
<template>
<v-card>
<v-card-title v-if="isStaff">
- <v-btn
- class="text-capitalize"
- color="primary"
- @click.stop="$router.push('projects/create')"
- >
+ <v-btn class="text-capitalize" color="primary" @click.stop="$router.push('projects/create')">
{{ $t('generic.create') }}
</v-btn>
<v-btn
class="text-capitalize ms-2"
:disabled="!canDelete"
outlined
- @click.stop="dialogDelete=true"
+ @click.stop="dialogDelete = true"
>
{{ $t('generic.delete') }}
</v-btn>
<v-dialog v-model="dialogDelete">
- <form-delete
- :selected="selected"
- @cancel="dialogDelete=false"
- @remove="remove"
- />
+ <form-delete :selected="selected" @cancel="dialogDelete = false" @remove="remove" />
</v-dialog>
</v-card-title>
<project-list
@@ -43,10 +35,9 @@ import { ProjectDTO, ProjectListDTO } from '~/services/application/project/proje
import FormDelete from '~/components/project/FormDelete.vue'
export default Vue.extend({
-
components: {
FormDelete,
- ProjectList,
+ ProjectList
},
layout: 'projects',
@@ -71,15 +62,14 @@ export default Vue.extend({
...mapGetters('auth', ['isStaff']),
canDelete(): boolean {
return this.selected.length > 0
- },
+ }
},
watch: {
- '$route.query': _.debounce(function() {
- // @ts-ignore
- this.$fetch()
- }, 1000
- ),
+ '$route.query': _.debounce(function () {
+ // @ts-ignore
+ this.$fetch()
+ }, 1000)
},
methods: {
diff --git a/frontend/plugins/color.ts b/frontend/plugins/color.ts
index 05b91a1f5f..50f96ac2e2 100644
--- a/frontend/plugins/color.ts
+++ b/frontend/plugins/color.ts
@@ -7,10 +7,10 @@ declare module 'vue/types/vue' {
}
Vue.prototype.$contrastColor = (hexString: string) => {
- // W3c offers a formula for calculating ideal color:
+ // W3c offers a formula for calculating ideal color:
// https://www.w3.org/TR/AERT/#color-contrast
const r = parseInt(hexString.substr(1, 2), 16)
const g = parseInt(hexString.substr(3, 2), 16)
const b = parseInt(hexString.substr(5, 2), 16)
- return ((((r * 299) + (g * 587) + (b * 114)) / 1000) < 128) ? '#ffffff' : '#000000'
+ return (r * 299 + g * 587 + b * 114) / 1000 < 128 ? '#ffffff' : '#000000'
}
diff --git a/frontend/plugins/filters.js b/frontend/plugins/filters.js
index 6a52a76895..68766e080b 100644
--- a/frontend/plugins/filters.js
+++ b/frontend/plugins/filters.js
@@ -1,6 +1,6 @@
import Vue from 'vue'
-export const truncate = function(text, length, clamp) {
+export const truncate = function (text, length, clamp) {
text = text || ''
clamp = clamp || '...'
length = length || 30
diff --git a/frontend/plugins/role.ts b/frontend/plugins/role.ts
index db2c910848..333e3a6db7 100644
--- a/frontend/plugins/role.ts
+++ b/frontend/plugins/role.ts
@@ -7,9 +7,9 @@ declare module 'vue/types/vue' {
}
type RoleMapping = {
- projectAdmin: string,
- annotator: string,
- annotationApprover: string,
+ projectAdmin: string
+ annotator: string
+ annotationApprover: string
undefined: string
}
diff --git a/frontend/plugins/services.ts b/frontend/plugins/services.ts
index 9ceb191f70..b320b20a4a 100644
--- a/frontend/plugins/services.ts
+++ b/frontend/plugins/services.ts
@@ -13,7 +13,7 @@ import { APIUserRepository } from '~/repositories/user/apiUserRepository'
import { APIMetricsRepository } from '~/repositories/metrics/apiMetricsRepository'
import { APIRoleRepository } from '~/repositories/role/apiRoleRepository'
import { APIProjectRepository } from '~/repositories/project/apiProjectRepository'
-import { LocalStorageOptionRepository} from '~/repositories/option/apiOptionRepository'
+import { LocalStorageOptionRepository } from '~/repositories/option/apiOptionRepository'
import { APIMemberRepository } from '~/repositories/member/apiMemberRepository'
import { APILabelRepository } from '~/repositories/label/apiLabelRepository'
import { APIExampleRepository } from '~/repositories/example/apiDocumentRepository'
@@ -33,7 +33,7 @@ import { Seq2seqApplicationService } from '~/services/application/tasks/seq2seq/
import { ConfigApplicationService } from '~/services/application/autoLabeling/configApplicationService'
import { TemplateApplicationService } from '~/services/application/autoLabeling/templateApplicationService'
import { APITextClassificationRepository } from '~/repositories/tasks/textClassification/apiTextClassification'
-import { TextClassificationApplicationService } from '~/services/application/tasks/textClassification/textClassificationApplicationService'
+import { TextClassificationService } from '~/services/application/tasks/textClassification/textClassificationApplicationService'
import { AuthApplicationService } from '~/services/application/auth/authApplicationService'
import { APIDownloadFormatRepository } from '~/repositories/download/apiDownloadFormatRepository'
import { APIDownloadRepository } from '~/repositories/download/apiDownloadRepository'
@@ -41,32 +41,32 @@ import { DownloadApplicationService } from '~/services/application/download/down
import { DownloadFormatApplicationService } from '~/services/application/download/downloadFormatApplicationService'
import { APITagRepository } from '~/repositories/tag/apiTagRepository'
import { TagApplicationService } from '~/services/application/tag/tagApplicationService'
-import { ApiRelationRepository } from "~/repositories/tasks/sequenceLabeling/apiRelationRepository"
+import { ApiRelationRepository } from '~/repositories/tasks/sequenceLabeling/apiRelationRepository'
export interface Services {
- categoryType: LabelApplicationService,
- spanType: LabelApplicationService,
- relationType: LabelApplicationService,
- member: MemberApplicationService,
- user: UserApplicationService,
- role: RoleApplicationService,
- project: ProjectApplicationService,
- comment: CommentApplicationService,
- metrics: MetricsApplicationService,
- example: ExampleApplicationService,
- textClassification: TextClassificationApplicationService,
- sequenceLabeling: SequenceLabelingApplicationService,
- seq2seq: Seq2seqApplicationService,
- option: OptionApplicationService,
- config: ConfigApplicationService,
- template: TemplateApplicationService,
- auth: AuthApplicationService,
- catalog: CatalogApplicationService,
- parse: ParseApplicationService,
- taskStatus: TaskStatusApplicationService,
- downloadFormat: DownloadFormatApplicationService,
- download: DownloadApplicationService,
- tag: TagApplicationService,
+ categoryType: LabelApplicationService
+ spanType: LabelApplicationService
+ relationType: LabelApplicationService
+ member: MemberApplicationService
+ user: UserApplicationService
+ role: RoleApplicationService
+ project: ProjectApplicationService
+ comment: CommentApplicationService
+ metrics: MetricsApplicationService
+ example: ExampleApplicationService
+ textClassification: TextClassificationService
+ sequenceLabeling: SequenceLabelingApplicationService
+ seq2seq: Seq2seqApplicationService
+ option: OptionApplicationService
+ config: ConfigApplicationService
+ template: TemplateApplicationService
+ auth: AuthApplicationService
+ catalog: CatalogApplicationService
+ parse: ParseApplicationService
+ taskStatus: TaskStatusApplicationService
+ downloadFormat: DownloadFormatApplicationService
+ download: DownloadApplicationService
+ tag: TagApplicationService
}
declare module 'vue/types/vue' {
@@ -76,21 +76,21 @@ declare module 'vue/types/vue' {
}
const plugin: Plugin = (_, inject) => {
- const memberRepository = new APIMemberRepository()
- const userRepository = new APIUserRepository()
- const roleRepository = new APIRoleRepository()
- const projectRepository = new APIProjectRepository()
- const commentRepository = new APICommentRepository()
+ const memberRepository = new APIMemberRepository()
+ const userRepository = new APIUserRepository()
+ const roleRepository = new APIRoleRepository()
+ const projectRepository = new APIProjectRepository()
+ const commentRepository = new APICommentRepository()
const metricsRepository = new APIMetricsRepository()
- const exampleRepository = new APIExampleRepository()
+ const exampleRepository = new APIExampleRepository()
const textClassificationRepository = new APITextClassificationRepository()
- const sequenceLabelingRepository = new APISequenceLabelingRepository()
+ const sequenceLabelingRepository = new APISequenceLabelingRepository()
const linkRepository = new ApiRelationRepository()
const seq2seqRepository = new APISeq2seqRepository()
- const optionRepository = new LocalStorageOptionRepository()
- const configRepository = new APIConfigRepository()
+ const optionRepository = new LocalStorageOptionRepository()
+ const configRepository = new APIConfigRepository()
const tagRepository = new APITagRepository()
- const templateRepository = new APITemplateRepository()
+ const templateRepository = new APITemplateRepository()
const authRepository = new APIAuthRepository()
const catalogRepository = new APICatalogRepository()
const parseRepository = new APIParseRepository()
@@ -98,18 +98,21 @@ const plugin: Plugin = (_, inject) => {
const downloadFormatRepository = new APIDownloadFormatRepository()
const downloadRepository = new APIDownloadRepository()
- const categoryType = new LabelApplicationService(new APILabelRepository('category-type'))
- const spanType = new LabelApplicationService(new APILabelRepository('span-type'))
- const relationType = new LabelApplicationService(new APILabelRepository('relation-type'))
- const member = new MemberApplicationService(memberRepository)
- const user = new UserApplicationService(userRepository)
- const role = new RoleApplicationService(roleRepository)
- const project = new ProjectApplicationService(projectRepository)
- const comment = new CommentApplicationService(commentRepository)
+ const categoryType = new LabelApplicationService(new APILabelRepository('category-type'))
+ const spanType = new LabelApplicationService(new APILabelRepository('span-type'))
+ const relationType = new LabelApplicationService(new APILabelRepository('relation-type'))
+ const member = new MemberApplicationService(memberRepository)
+ const user = new UserApplicationService(userRepository)
+ const role = new RoleApplicationService(roleRepository)
+ const project = new ProjectApplicationService(projectRepository)
+ const comment = new CommentApplicationService(commentRepository)
const metrics = new MetricsApplicationService(metricsRepository)
- const example = new ExampleApplicationService(exampleRepository)
- const textClassification = new TextClassificationApplicationService(textClassificationRepository)
- const sequenceLabeling = new SequenceLabelingApplicationService(sequenceLabelingRepository, linkRepository)
+ const example = new ExampleApplicationService(exampleRepository)
+ const textClassification = new TextClassificationService(textClassificationRepository)
+ const sequenceLabeling = new SequenceLabelingApplicationService(
+ sequenceLabelingRepository,
+ linkRepository
+ )
const seq2seq = new Seq2seqApplicationService(seq2seqRepository)
const option = new OptionApplicationService(optionRepository)
const config = new ConfigApplicationService(configRepository)
@@ -121,7 +124,7 @@ const plugin: Plugin = (_, inject) => {
const taskStatus = new TaskStatusApplicationService(taskStatusRepository)
const downloadFormat = new DownloadFormatApplicationService(downloadFormatRepository)
const download = new DownloadApplicationService(downloadRepository)
-
+
const services: Services = {
categoryType,
spanType,
@@ -145,7 +148,7 @@ const plugin: Plugin = (_, inject) => {
taskStatus,
downloadFormat,
download,
- tag,
+ tag
}
inject('services', services)
}
diff --git a/frontend/plugins/utils.js b/frontend/plugins/utils.js
index fced83963c..1b9f85cc06 100644
--- a/frontend/plugins/utils.js
+++ b/frontend/plugins/utils.js
@@ -1,20 +1,20 @@
-export const idealColor = function(hexString) {
+export const idealColor = function (hexString) {
// W3c offers a formula for calculating ideal color:
// https://www.w3.org/TR/AERT/#color-contrast
const r = parseInt(hexString.substr(1, 2), 16)
const g = parseInt(hexString.substr(3, 2), 16)
const b = parseInt(hexString.substr(5, 2), 16)
- return ((((r * 299) + (g * 587) + (b * 114)) / 1000) < 128) ? '#ffffff' : '#000000'
+ return (r * 299 + g * 587 + b * 114) / 1000 < 128 ? '#ffffff' : '#000000'
}
-export const translatedRoles = function(roles, mappings) {
+export const translatedRoles = function (roles, mappings) {
roles.forEach((role) => {
role.translatedName = translateRole(role.name, mappings)
})
return roles
}
-export const translateRole = function(role, mappings) {
+export const translateRole = function (role, mappings) {
if (role === 'project_admin') {
return mappings.projectAdmin
} else if (role === 'annotator') {
diff --git a/frontend/repositories/auth/apiAuthRepository.ts b/frontend/repositories/auth/apiAuthRepository.ts
index 440a57b08b..4a94ab95ef 100644
--- a/frontend/repositories/auth/apiAuthRepository.ts
+++ b/frontend/repositories/auth/apiAuthRepository.ts
@@ -2,9 +2,7 @@ import ApiService from '@/services/api.service'
import { AuthRepository } from '@/domain/models/auth/authRepository'
export class APIAuthRepository implements AuthRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
async login(username: string, password: string): Promise<void> {
const url = `/auth/login/`
diff --git a/frontend/repositories/autoLabeling/config/apiConfigRepository.ts b/frontend/repositories/autoLabeling/config/apiConfigRepository.ts
index 1342addc23..7fe347eca0 100644
--- a/frontend/repositories/autoLabeling/config/apiConfigRepository.ts
+++ b/frontend/repositories/autoLabeling/config/apiConfigRepository.ts
@@ -3,26 +3,22 @@ import { ConfigRepository, ConfigTestResponse } from '~/domain/models/autoLabeli
import { ConfigItemList, ConfigItem } from '~/domain/models/autoLabeling/config'
export interface ConfigItemResponse {
- id: number,
- model_name: string,
- model_attrs: object,
- template: string,
- label_mapping: object,
- task_type: string,
+ id: number
+ model_name: string
+ model_attrs: object
+ template: string
+ label_mapping: object
+ task_type: string
}
export class APIConfigRepository implements ConfigRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
async list(projectId: string): Promise<ConfigItemList> {
const url = `/projects/${projectId}/auto-labeling/configs`
const response = await this.request.get(url)
const responseItems: ConfigItemResponse[] = response.data
- return ConfigItemList.valueOf(
- responseItems.map(item => ConfigItem.valueOf(item))
- )
+ return ConfigItemList.valueOf(responseItems.map((item) => ConfigItem.valueOf(item)))
}
async create(projectId: string, item: ConfigItem): Promise<ConfigItem> {
@@ -52,22 +48,36 @@ export class APIConfigRepository implements ConfigRepository {
async testParameters(projectId: string, item: ConfigItem, text: string) {
const url = `/projects/${projectId}/auto-labeling/request-testing`
- const response = await this.request.post(url, {...item.toAPI(), text})
+ const response = await this.request.post(url, { ...item.toAPI(), text })
const responseItem: ConfigTestResponse = response.data
return responseItem
}
- async testTemplate(projectId: string, response: any, item: ConfigItem): Promise<ConfigTestResponse> {
+ async testTemplate(
+ projectId: string,
+ response: any,
+ item: ConfigItem
+ ): Promise<ConfigTestResponse> {
console.log(projectId)
const url = `/projects/${projectId}/auto-labeling/label-extractor-testing`
- const _response = await this.request.post(url, { response, ...item.toAPI() })
+ const _response = await this.request.post(url, {
+ response,
+ ...item.toAPI()
+ })
const responseItem: ConfigTestResponse = _response.data
return responseItem
}
- async testMapping(projectId: string, item: ConfigItem, response: any): Promise<ConfigTestResponse> {
+ async testMapping(
+ projectId: string,
+ item: ConfigItem,
+ response: any
+ ): Promise<ConfigTestResponse> {
const url = `/projects/${projectId}/auto-labeling/label-mapper-testing`
- const _response = await this.request.post(url, {...item.toAPI(), response})
+ const _response = await this.request.post(url, {
+ ...item.toAPI(),
+ response
+ })
const responseItem: ConfigTestResponse = _response.data
return responseItem
}
diff --git a/frontend/repositories/autoLabeling/template/apiTemplateRepository.ts b/frontend/repositories/autoLabeling/template/apiTemplateRepository.ts
index 15b3c84a23..1a53a35da6 100644
--- a/frontend/repositories/autoLabeling/template/apiTemplateRepository.ts
+++ b/frontend/repositories/autoLabeling/template/apiTemplateRepository.ts
@@ -3,9 +3,7 @@ import { TemplateRepository } from '~/domain/models/autoLabeling/templateReposit
import { ConfigTemplateItem, ConfigResponse } from '~/domain/models/autoLabeling/template'
export class APITemplateRepository implements TemplateRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
async list(projectId: string, taskName: string): Promise<string[]> {
const url = `/projects/${projectId}/auto-labeling/templates?task_name=${taskName}`
diff --git a/frontend/repositories/celery/apiTaskStatusRepository.ts b/frontend/repositories/celery/apiTaskStatusRepository.ts
index 9ded859264..378f86f35e 100644
--- a/frontend/repositories/celery/apiTaskStatusRepository.ts
+++ b/frontend/repositories/celery/apiTaskStatusRepository.ts
@@ -4,9 +4,7 @@ import { TaskStatusRepository } from '@/domain/models/celery/taskStatusRepositor
import { Status } from '@/domain/models/celery/status'
export class APITaskStatusRepository implements TaskStatusRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
async get(taskId: string): Promise<Status> {
const url = `/tasks/status/${taskId}`
diff --git a/frontend/repositories/comment/apiCommentRepository.ts b/frontend/repositories/comment/apiCommentRepository.ts
index 8b21b87178..db04ad4e31 100644
--- a/frontend/repositories/comment/apiCommentRepository.ts
+++ b/frontend/repositories/comment/apiCommentRepository.ts
@@ -3,13 +3,13 @@ import ApiService from '@/services/api.service'
import { CommentRepository, SearchOption } from '@/domain/models/comment/commentRepository'
import { CommentItem, CommentItemList } from '~/domain/models/comment/comment'
-
export class APICommentRepository implements CommentRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
- async listAll(projectId: string, { limit = '10', offset = '0', q = '' }: SearchOption): Promise<CommentItemList> {
+ async listAll(
+ projectId: string,
+ { limit = '10', offset = '0', q = '' }: SearchOption
+ ): Promise<CommentItemList> {
const url = `/projects/${projectId}/comments?q=${q}&limit=${limit}&offset=${offset}`
const response = await this.request.get(url)
return plainToInstance(CommentItemList, response.data)
@@ -23,7 +23,11 @@ export class APICommentRepository implements CommentRepository {
async create(projectId: string, exampleId: number, text: string): Promise<CommentItem> {
const url = `/projects/${projectId}/comments?example=${exampleId}`
- const response = await this.request.post(url, { projectId, exampleId, text })
+ const response = await this.request.post(url, {
+ projectId,
+ exampleId,
+ text
+ })
return plainToInstance(CommentItem, response.data)
}
diff --git a/frontend/repositories/download/apiDownloadFormatRepository.ts b/frontend/repositories/download/apiDownloadFormatRepository.ts
index 804330a0e1..13e5e1e163 100644
--- a/frontend/repositories/download/apiDownloadFormatRepository.ts
+++ b/frontend/repositories/download/apiDownloadFormatRepository.ts
@@ -4,9 +4,7 @@ import { DownloadFormatRepository } from '@/domain/models/download/downloadForma
import { Format } from '~/domain/models/download/format'
export class APIDownloadFormatRepository implements DownloadFormatRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
async list(projectId: string): Promise<Format[]> {
const url = `/projects/${projectId}/download-format`
diff --git a/frontend/repositories/download/apiDownloadRepository.ts b/frontend/repositories/download/apiDownloadRepository.ts
index 2938ce18e9..a18271103f 100644
--- a/frontend/repositories/download/apiDownloadRepository.ts
+++ b/frontend/repositories/download/apiDownloadRepository.ts
@@ -2,15 +2,13 @@ import ApiService from '@/services/api.service'
import { DownloadRepository } from '@/domain/models/download/downloadRepository'
export class APIDownloadRepository implements DownloadRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
async prepare(projectId: string, format: string, exportApproved: boolean): Promise<string> {
const url = `/projects/${projectId}/download`
const data = {
format,
- exportApproved,
+ exportApproved
}
const response = await this.request.post(url, data)
return response.data.task_id
@@ -19,7 +17,7 @@ export class APIDownloadRepository implements DownloadRepository {
async download(projectId: string, taskId: string): Promise<void> {
const url = `/projects/${projectId}/download?taskId=${taskId}`
const config = {
- responseType: 'blob',
+ responseType: 'blob'
}
const response = await this.request.get(url, config)
const downloadUrl = window.URL.createObjectURL(new Blob([response.data]))
diff --git a/frontend/repositories/example/apiDocumentRepository.ts b/frontend/repositories/example/apiDocumentRepository.ts
index df8a3f5805..08af0992e7 100644
--- a/frontend/repositories/example/apiDocumentRepository.ts
+++ b/frontend/repositories/example/apiDocumentRepository.ts
@@ -4,11 +4,12 @@ import { ExampleRepository, SearchOption } from '~/domain/models/example/example
import { ExampleItem, ExampleItemList } from '~/domain/models/example/example'
export class APIExampleRepository implements ExampleRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
- async list(projectId: string, { limit = '10', offset = '0', q = '', isChecked = '' }: SearchOption): Promise<ExampleItemList> {
+ async list(
+ projectId: string,
+ { limit = '10', offset = '0', q = '', isChecked = '' }: SearchOption
+ ): Promise<ExampleItemList> {
const url = `/projects/${projectId}/examples?limit=${limit}&offset=${offset}&q=${q}&confirmed=${isChecked}`
const response = await this.request.get(url)
return plainToInstance(ExampleItemList, response.data)
diff --git a/frontend/repositories/label/apiLabelRepository.ts b/frontend/repositories/label/apiLabelRepository.ts
index ff35e98242..858040ef8e 100644
--- a/frontend/repositories/label/apiLabelRepository.ts
+++ b/frontend/repositories/label/apiLabelRepository.ts
@@ -4,19 +4,16 @@ import { LabelRepository } from '~/domain/models/label/labelRepository'
import { LabelItem } from '~/domain/models/label/label'
export interface LabelItemResponse {
- id: number,
- text: string,
- prefix_key: string,
- suffix_key: string,
- background_color: string,
+ id: number
+ text: string
+ prefix_key: string
+ suffix_key: string
+ background_color: string
text_color: string
}
export class APILabelRepository implements LabelRepository {
- constructor(
- private readonly baseUrl = 'label',
- private readonly request = ApiService
- ) {}
+ constructor(private readonly baseUrl = 'label', private readonly request = ApiService) {}
async list(projectId: string): Promise<LabelItem[]> {
const url = `/projects/${projectId}/${this.baseUrl}s`
@@ -56,7 +53,7 @@ export class APILabelRepository implements LabelRepository {
}
try {
await this.request.post(url, payload, config)
- } catch(e: any) {
+ } catch (e: any) {
const data = e.response.data
if ('detail' in data) {
throw new Error(data.detail)
diff --git a/frontend/repositories/member/apiMemberRepository.ts b/frontend/repositories/member/apiMemberRepository.ts
index bb8da9e2cc..c64a66c12f 100644
--- a/frontend/repositories/member/apiMemberRepository.ts
+++ b/frontend/repositories/member/apiMemberRepository.ts
@@ -4,9 +4,7 @@ import { MemberRepository } from '@/domain/models/member/memberRepository'
import { MemberItem } from '~/domain/models/member/member'
export class APIMemberRepository implements MemberRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
async list(projectId: string): Promise<MemberItem[]> {
const url = `/projects/${projectId}/members`
diff --git a/frontend/repositories/metrics/apiMetricsRepository.ts b/frontend/repositories/metrics/apiMetricsRepository.ts
index 2d2545ab2e..71243fddc7 100644
--- a/frontend/repositories/metrics/apiMetricsRepository.ts
+++ b/frontend/repositories/metrics/apiMetricsRepository.ts
@@ -3,9 +3,7 @@ import { MetricsRepository } from '@/domain/models/metrics/metricsRepository'
import { Distribution, Progress, MyProgress } from '~/domain/models/metrics/metrics'
export class APIMetricsRepository implements MetricsRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
async fetchCategoryDistribution(projectId: string): Promise<Distribution> {
const url = `/projects/${projectId}/metrics/category-distribution`
diff --git a/frontend/repositories/option/apiOptionRepository.ts b/frontend/repositories/option/apiOptionRepository.ts
index 4eabac2dd3..c5eafb50dc 100644
--- a/frontend/repositories/option/apiOptionRepository.ts
+++ b/frontend/repositories/option/apiOptionRepository.ts
@@ -2,7 +2,6 @@ import { OptionRepository } from '../../domain/models/option/optionRepository'
import { OptionItem } from '~/domain/models/option/option'
export class LocalStorageOptionRepository implements OptionRepository {
-
findById(projectId: string): OptionItem {
const checkpoint = this.loadCheckpoint()
return OptionItem.valueOf(checkpoint[projectId] ? checkpoint[projectId] : { page: 1 })
diff --git a/frontend/repositories/project/apiProjectRepository.ts b/frontend/repositories/project/apiProjectRepository.ts
index 848c60c9d2..48134434b7 100644
--- a/frontend/repositories/project/apiProjectRepository.ts
+++ b/frontend/repositories/project/apiProjectRepository.ts
@@ -3,11 +3,8 @@ import ApiService from '@/services/api.service'
import { ProjectRepository, SearchOption } from '@/domain/models/project/projectRepository'
import { ProjectReadItem, ProjectWriteItem, ProjectItemList } from '~/domain/models/project/project'
-
export class APIProjectRepository implements ProjectRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
async list({ limit = '10', offset = '0', q = '' }: SearchOption): Promise<ProjectItemList> {
const url = `/projects?limit=${limit}&offset=${offset}&q=${q}`
diff --git a/frontend/repositories/role/apiRoleRepository.ts b/frontend/repositories/role/apiRoleRepository.ts
index e7ca6961c7..ee078ca897 100644
--- a/frontend/repositories/role/apiRoleRepository.ts
+++ b/frontend/repositories/role/apiRoleRepository.ts
@@ -4,9 +4,7 @@ import { RoleRepository } from '../../domain/models/role/roleRepository'
import { RoleItem } from '~/domain/models/role/role'
export class APIRoleRepository implements RoleRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
async list(): Promise<RoleItem[]> {
const url = `/roles`
diff --git a/frontend/repositories/tag/apiTagRepository.ts b/frontend/repositories/tag/apiTagRepository.ts
index e99b209c4e..58ac8e4ecc 100644
--- a/frontend/repositories/tag/apiTagRepository.ts
+++ b/frontend/repositories/tag/apiTagRepository.ts
@@ -4,9 +4,7 @@ import { TagRepository } from '~/domain/models/tag/tagRepository'
import { TagItem } from '~/domain/models/tag/tag'
export class APITagRepository implements TagRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
async list(projectId: string): Promise<TagItem[]> {
const url = `/projects/${projectId}/tags`
diff --git a/frontend/repositories/tasks/seq2seq/apiSeq2seq.ts b/frontend/repositories/tasks/seq2seq/apiSeq2seq.ts
index eea4e162c6..e337c41e87 100644
--- a/frontend/repositories/tasks/seq2seq/apiSeq2seq.ts
+++ b/frontend/repositories/tasks/seq2seq/apiSeq2seq.ts
@@ -1,7 +1,6 @@
import { AnnotationRepository } from '@/domain/models/tasks/annotationRepository'
import { Seq2seqLabel } from '~/domain/models/tasks/seq2seq'
-
export class APISeq2seqRepository extends AnnotationRepository<Seq2seqLabel> {
constructor() {
super(Seq2seqLabel)
diff --git a/frontend/repositories/tasks/sequenceLabeling/apiRelationRepository.ts b/frontend/repositories/tasks/sequenceLabeling/apiRelationRepository.ts
index bfa605498e..2be17e3a2c 100644
--- a/frontend/repositories/tasks/sequenceLabeling/apiRelationRepository.ts
+++ b/frontend/repositories/tasks/sequenceLabeling/apiRelationRepository.ts
@@ -1,38 +1,40 @@
import ApiService from '@/services/api.service'
-import { RelationRepository } from "~/domain/models/tasks/relationRepository"
-import { RelationItem } from "~/domain/models/tasks/relation"
+import { RelationRepository } from '~/domain/models/tasks/relationRepository'
+import { RelationItem } from '~/domain/models/tasks/relation'
export class ApiRelationRepository implements RelationRepository {
- constructor(
- private readonly request = ApiService
- ) {
- }
+ constructor(private readonly request = ApiService) {}
- async list(projectId: string, exampleId: number): Promise<RelationItem[]> {
- const url = `/projects/${projectId}/examples/${exampleId}/relations`
- const response = await this.request.get(url)
- return response.data.map((relation: any) => RelationItem.valueOf(relation))
- }
+ async list(projectId: string, exampleId: number): Promise<RelationItem[]> {
+ const url = `/projects/${projectId}/examples/${exampleId}/relations`
+ const response = await this.request.get(url)
+ return response.data.map((relation: any) => RelationItem.valueOf(relation))
+ }
- async create(projectId: string, exampleId: number, item: RelationItem): Promise<RelationItem> {
- const url = `/projects/${projectId}/examples/${exampleId}/relations`
- const response = await this.request.post(url, item.toObject())
- return RelationItem.valueOf(response.data)
- }
+ async create(projectId: string, exampleId: number, item: RelationItem): Promise<RelationItem> {
+ const url = `/projects/${projectId}/examples/${exampleId}/relations`
+ const response = await this.request.post(url, item.toObject())
+ return RelationItem.valueOf(response.data)
+ }
- async update(projectId: string, exampleId: number, relationId: number, relationType: number): Promise<RelationItem> {
- const url = `/projects/${projectId}/examples/${exampleId}/relations/${relationId}`
- const response = await this.request.patch(url, {type: relationType})
- return RelationItem.valueOf(response.data)
- }
+ async update(
+ projectId: string,
+ exampleId: number,
+ relationId: number,
+ relationType: number
+ ): Promise<RelationItem> {
+ const url = `/projects/${projectId}/examples/${exampleId}/relations/${relationId}`
+ const response = await this.request.patch(url, { type: relationType })
+ return RelationItem.valueOf(response.data)
+ }
- async delete(projectId: string, exampleId: number, relationId: number): Promise<void> {
- const url = `/projects/${projectId}/examples/${exampleId}/relations/${relationId}`
- await this.request.delete(url)
- }
+ async delete(projectId: string, exampleId: number, relationId: number): Promise<void> {
+ const url = `/projects/${projectId}/examples/${exampleId}/relations/${relationId}`
+ await this.request.delete(url)
+ }
- async bulkDelete(projectId: string, exampleId: number, relationIds: number[]): Promise<void> {
- const url = `/projects/${projectId}/examples/${exampleId}/relations`
- await this.request.delete(url, {ids: relationIds})
- }
+ async bulkDelete(projectId: string, exampleId: number, relationIds: number[]): Promise<void> {
+ const url = `/projects/${projectId}/examples/${exampleId}/relations`
+ await this.request.delete(url, { ids: relationIds })
+ }
}
diff --git a/frontend/repositories/tasks/sequenceLabeling/apiSequenceLabeling.ts b/frontend/repositories/tasks/sequenceLabeling/apiSequenceLabeling.ts
index 23ee3cac76..85436677db 100644
--- a/frontend/repositories/tasks/sequenceLabeling/apiSequenceLabeling.ts
+++ b/frontend/repositories/tasks/sequenceLabeling/apiSequenceLabeling.ts
@@ -1,7 +1,6 @@
import { AnnotationRepository } from '@/domain/models/tasks/annotationRepository'
import { Span } from '~/domain/models/tasks/sequenceLabeling'
-
export class APISequenceLabelingRepository extends AnnotationRepository<Span> {
constructor() {
super(Span)
diff --git a/frontend/repositories/tasks/textClassification/apiTextClassification.ts b/frontend/repositories/tasks/textClassification/apiTextClassification.ts
index b70ef886f6..a920be67f0 100644
--- a/frontend/repositories/tasks/textClassification/apiTextClassification.ts
+++ b/frontend/repositories/tasks/textClassification/apiTextClassification.ts
@@ -1,10 +1,9 @@
import { AnnotationRepository } from '@/domain/models/tasks/annotationRepository'
-import { TextClassificationItem } from '~/domain/models/tasks/textClassification'
+import { CategoryItem } from '~/domain/models/tasks/textClassification'
-
-export class APITextClassificationRepository extends AnnotationRepository<TextClassificationItem> {
+export class APITextClassificationRepository extends AnnotationRepository<CategoryItem> {
constructor() {
- super(TextClassificationItem)
+ super(CategoryItem)
}
protected baseUrl(projectId: string, docId: number): string {
diff --git a/frontend/repositories/upload/apiCatalogRepository.ts b/frontend/repositories/upload/apiCatalogRepository.ts
index bb9d3ad2de..8f3f42bb86 100644
--- a/frontend/repositories/upload/apiCatalogRepository.ts
+++ b/frontend/repositories/upload/apiCatalogRepository.ts
@@ -4,9 +4,7 @@ import { CatalogRepository } from '@/domain/models/upload/catalogRepository'
import { Catalog } from '~/domain/models/upload/catalog'
export class APICatalogRepository implements CatalogRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
async list(projectId: string): Promise<Catalog[]> {
const url = `/projects/${projectId}/catalog`
diff --git a/frontend/repositories/upload/apiParseRepository.ts b/frontend/repositories/upload/apiParseRepository.ts
index d60ddcad8e..b019e72084 100644
--- a/frontend/repositories/upload/apiParseRepository.ts
+++ b/frontend/repositories/upload/apiParseRepository.ts
@@ -2,11 +2,15 @@ import ApiService from '@/services/api.service'
import { ParseRepository } from '@/domain/models/upload/parseRepository'
export class APIParseRepository implements ParseRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
- async analyze(projectId: string, format: string, task: string, uploadIds: number[], option: object): Promise<string> {
+ async analyze(
+ projectId: string,
+ format: string,
+ task: string,
+ uploadIds: number[],
+ option: object
+ ): Promise<string> {
const url = `/projects/${projectId}/upload`
const data = {
format,
diff --git a/frontend/repositories/user/apiUserRepository.ts b/frontend/repositories/user/apiUserRepository.ts
index 1756ef89d5..3636b9177e 100644
--- a/frontend/repositories/user/apiUserRepository.ts
+++ b/frontend/repositories/user/apiUserRepository.ts
@@ -4,9 +4,7 @@ import { UserRepository } from '@/domain/models/user/userRepository'
import { UserItem } from '~/domain/models/user/user'
export class APIUserRepository implements UserRepository {
- constructor(
- private readonly request = ApiService
- ) {}
+ constructor(private readonly request = ApiService) {}
async getMe(): Promise<UserItem> {
const url = '/me'
diff --git a/frontend/rules/index.js b/frontend/rules/index.js
index 1469c58fe2..a9e5ba7242 100644
--- a/frontend/rules/index.js
+++ b/frontend/rules/index.js
@@ -1,83 +1,67 @@
// Rules for project label.
export const colorRules = (msg) => {
- return [
- v => !!v || msg.colorRequired
- ]
+ return [(v) => !!v || msg.colorRequired]
}
export const labelNameRules = (msg) => {
- return [
- v => !!v || msg.labelRequired,
- v => (v && v.length <= 30) || msg.labelLessThan30Chars
- ]
+ return [(v) => !!v || msg.labelRequired, (v) => (v && v.length <= 30) || msg.labelLessThan30Chars]
}
// Rules for project member.
export const userNameRules = (msg) => {
return [
- v => !!v || msg.userNameRequired,
- v => (v && v.length <= 30) || msg.userNameLessThan30Chars
+ (v) => !!v || msg.userNameRequired,
+ (v) => (v && v.length <= 30) || msg.userNameLessThan30Chars
]
}
export const roleRules = (msg) => {
- return [
- v => !!v || msg.roleRequired
- ]
+ return [(v) => !!v || msg.roleRequired]
}
// Rules for a project.
export const projectNameRules = (msg) => {
return [
- v => !!v || msg.projectNameRequired,
- v => (v && v.length <= 30) || msg.projectNameLessThan30Chars
+ (v) => !!v || msg.projectNameRequired,
+ (v) => (v && v.length <= 30) || msg.projectNameLessThan30Chars
]
}
export const descriptionRules = (msg) => {
return [
- v => !!v || msg.descriptionRequired,
- v => (v && v.length <= 100) || msg.descriptionLessThan30Chars
+ (v) => !!v || msg.descriptionRequired,
+ (v) => (v && v.length <= 100) || msg.descriptionLessThan30Chars
]
}
export const projectTypeRules = (msg) => {
- return [
- v => !!v || msg.projectTypeRequired
- ]
+ return [(v) => !!v || msg.projectTypeRequired]
}
// Rules for Document.
export const fileFormatRules = (msg) => {
- return [
- v => !!v || msg.fileFormatRequired
- ]
+ return [(v) => !!v || msg.fileFormatRequired]
}
export const uploadFileRules = (msg) => {
return [
- v => !!v || msg.fileRequired,
- v => !v || v.some(file => file.size < 100000000) || msg.fileLessThan1MB
+ (v) => !!v || msg.fileRequired,
+ (v) => !v || v.some((file) => file.size < 100000000) || msg.fileLessThan1MB
]
}
export const uploadSingleFileRules = (msg) => {
- return [
- v => !!v || msg.fileRequired,
- v => !v || v.size < 1000000 || msg.fileLessThan1MB
- ]
+ return [(v) => !!v || msg.fileRequired, (v) => !v || v.size < 1000000 || msg.fileLessThan1MB]
}
// Rules for user.
export const passwordRules = (msg) => {
return [
- v => !!v || msg.passwordRequired,
- v => (v && v.length <= 30) || msg.passwordLessThan30Chars
+ (v) => !!v || msg.passwordRequired,
+ (v) => (v && v.length <= 30) || msg.passwordLessThan30Chars
]
}
export const templateNameRules = () => {
- return [
- v => !!v || 'Name is required'
- ]
+ return [(v) => !!v || 'Name is required']
}
diff --git a/frontend/services/application/auth/authApplicationService.ts b/frontend/services/application/auth/authApplicationService.ts
index b3cbd64206..4010ad4fef 100644
--- a/frontend/services/application/auth/authApplicationService.ts
+++ b/frontend/services/application/auth/authApplicationService.ts
@@ -1,9 +1,7 @@
import { AuthRepository } from '~/domain/models/auth/authRepository'
export class AuthApplicationService {
- constructor(
- private readonly repository: AuthRepository
- ) {}
+ constructor(private readonly repository: AuthRepository) {}
public async login(username: string, password: string) {
await this.repository.login(username, password)
diff --git a/frontend/services/application/autoLabeling/configApplicationService.ts b/frontend/services/application/autoLabeling/configApplicationService.ts
index 9b74069658..5a4226570e 100644
--- a/frontend/services/application/autoLabeling/configApplicationService.ts
+++ b/frontend/services/application/autoLabeling/configApplicationService.ts
@@ -2,9 +2,7 @@ import { ConfigRepository } from '~/domain/models/autoLabeling/configRepository'
import { ConfigItemList, ConfigItem } from '~/domain/models/autoLabeling/config'
export class ConfigApplicationService {
- constructor(
- private readonly configRepository: ConfigRepository
- ) {}
+ constructor(private readonly configRepository: ConfigRepository) {}
public list(id: string): Promise<ConfigItemList> {
return this.configRepository.list(id)
@@ -19,36 +17,38 @@ export class ConfigApplicationService {
}
public testParameters(projectId: string, item: ConfigItem, text: string) {
- return this.configRepository.testParameters(projectId, item, text)
- .then((value) => {
- return value
- })
- .catch((error) => {
- const data = error.response.data
- throw new Error(data)
- })
+ return this.configRepository
+ .testParameters(projectId, item, text)
+ .then((value) => {
+ return value
+ })
+ .catch((error) => {
+ const data = error.response.data
+ throw new Error(data)
+ })
}
public testTemplate(projectId: string, response: any, item: ConfigItem) {
- return this.configRepository.testTemplate(projectId, response, item)
- .then((value) => {
- return value
- })
- .catch((error) => {
- const data = error.response.data
- throw new Error(data)
- })
+ return this.configRepository
+ .testTemplate(projectId, response, item)
+ .then((value) => {
+ return value
+ })
+ .catch((error) => {
+ const data = error.response.data
+ throw new Error(data)
+ })
}
public testMapping(projectId: string, item: ConfigItem, response: any) {
- return this.configRepository.testMapping(projectId, item, response)
- .then((value) => {
- return value
- })
- .catch((error) => {
- const data = error.response.data
- throw new Error(data)
- })
+ return this.configRepository
+ .testMapping(projectId, item, response)
+ .then((value) => {
+ return value
+ })
+ .catch((error) => {
+ const data = error.response.data
+ throw new Error(data)
+ })
}
-
}
diff --git a/frontend/services/application/autoLabeling/templateApplicationService.ts b/frontend/services/application/autoLabeling/templateApplicationService.ts
index 919f87f3d2..81abe537cc 100644
--- a/frontend/services/application/autoLabeling/templateApplicationService.ts
+++ b/frontend/services/application/autoLabeling/templateApplicationService.ts
@@ -2,9 +2,7 @@ import { TemplateRepository } from '~/domain/models/autoLabeling/templateReposit
import { ConfigTemplateItem } from '~/domain/models/autoLabeling/template'
export class TemplateApplicationService {
- constructor(
- private readonly repository: TemplateRepository
- ) {}
+ constructor(private readonly repository: TemplateRepository) {}
public list(id: string, taskName: string): Promise<string[]> {
return this.repository.list(id, taskName)
diff --git a/frontend/services/application/celery/taskStatusApplicationService.ts b/frontend/services/application/celery/taskStatusApplicationService.ts
index f7155904c4..df99e7addb 100644
--- a/frontend/services/application/celery/taskStatusApplicationService.ts
+++ b/frontend/services/application/celery/taskStatusApplicationService.ts
@@ -2,9 +2,7 @@ import { TaskStatusRepository } from '@/domain/models/celery/taskStatusRepositor
import { StatusDTO } from './statusData'
export class TaskStatusApplicationService {
- constructor(
- private readonly repository: TaskStatusRepository
- ) {}
+ constructor(private readonly repository: TaskStatusRepository) {}
public async get(taskId: string): Promise<StatusDTO> {
const item = await this.repository.get(taskId)
diff --git a/frontend/services/application/comment/commentApplicationService.ts b/frontend/services/application/comment/commentApplicationService.ts
index 985228bbc7..e88dc94b80 100644
--- a/frontend/services/application/comment/commentApplicationService.ts
+++ b/frontend/services/application/comment/commentApplicationService.ts
@@ -4,18 +4,19 @@ import { CommentRepository, SearchOption } from '~/domain/models/comment/comment
import { CommentItem } from '~/domain/models/comment/comment'
export class CommentApplicationService {
- constructor(
- private readonly repository: CommentRepository
- ) {}
+ constructor(private readonly repository: CommentRepository) {}
- public async listProjectComment(projectId: string, options: SearchOption): Promise<CommentListDTO> {
+ public async listProjectComment(
+ projectId: string,
+ options: SearchOption
+ ): Promise<CommentListDTO> {
const item = await this.repository.listAll(projectId, options)
return new CommentListDTO(item)
}
public async list(projectId: string, docId: number): Promise<CommentReadDTO[]> {
const items = await this.repository.list(projectId, docId)
- return items.map(item => new CommentReadDTO(item))
+ return items.map((item) => new CommentReadDTO(item))
}
public create(projectId: string, docId: number, text: string): Promise<CommentItem> {
@@ -32,7 +33,7 @@ export class CommentApplicationService {
}
public deleteBulk(projectId: string, items: CommentReadDTO[]): Promise<void> {
- const ids = items.map(item => item.id)
+ const ids = items.map((item) => item.id)
return this.repository.deleteBulk(projectId, ids)
}
}
diff --git a/frontend/services/application/comment/commentData.ts b/frontend/services/application/comment/commentData.ts
index af2ffe9fce..ed32102f79 100644
--- a/frontend/services/application/comment/commentData.ts
+++ b/frontend/services/application/comment/commentData.ts
@@ -1,34 +1,33 @@
import { CommentItem, CommentItemList } from '~/domain/models/comment/comment'
-
export class CommentReadDTO {
- id: number;
- user: number;
- username: string;
- example: number;
- text: string;
- createdAt: string;
+ id: number
+ user: number
+ username: string
+ example: number
+ text: string
+ createdAt: string
constructor(item: CommentItem) {
- this.id = item.id;
- this.user = item.user;
- this.username = item.username;
- this.example = item.example;
- this.text = item.text;
- this.createdAt = item.createdAt;
+ this.id = item.id
+ this.user = item.user
+ this.username = item.username
+ this.example = item.example
+ this.text = item.text
+ this.createdAt = item.createdAt
}
}
export class CommentListDTO {
count: number
- next : string | null
- prev : string | null
+ next: string | null
+ prev: string | null
items: CommentReadDTO[]
constructor(item: CommentItemList) {
this.count = item.count
this.next = item.next
this.prev = item.prev
- this.items = item.items.map(_ => new CommentReadDTO(_))
+ this.items = item.items.map((_) => new CommentReadDTO(_))
}
}
diff --git a/frontend/services/application/download/downloadApplicationService.ts b/frontend/services/application/download/downloadApplicationService.ts
index e7075499cb..d636e5a954 100644
--- a/frontend/services/application/download/downloadApplicationService.ts
+++ b/frontend/services/application/download/downloadApplicationService.ts
@@ -1,11 +1,13 @@
import { DownloadRepository } from '~/domain/models/download/downloadRepository'
export class DownloadApplicationService {
- constructor(
- private readonly repository: DownloadRepository
- ) {}
+ constructor(private readonly repository: DownloadRepository) {}
- public async request(projectId: string, format: string, exportApproved: boolean): Promise<string> {
+ public async request(
+ projectId: string,
+ format: string,
+ exportApproved: boolean
+ ): Promise<string> {
const item = await this.repository.prepare(projectId, format, exportApproved)
return item
}
diff --git a/frontend/services/application/download/downloadFormatApplicationService.ts b/frontend/services/application/download/downloadFormatApplicationService.ts
index ebbf4de671..dac649d924 100644
--- a/frontend/services/application/download/downloadFormatApplicationService.ts
+++ b/frontend/services/application/download/downloadFormatApplicationService.ts
@@ -2,12 +2,10 @@ import { FormatDTO } from './formatData'
import { DownloadFormatRepository } from '~/domain/models/download/downloadFormatRepository'
export class DownloadFormatApplicationService {
- constructor(
- private readonly repository: DownloadFormatRepository
- ) {}
+ constructor(private readonly repository: DownloadFormatRepository) {}
public async list(projectId: string): Promise<FormatDTO[]> {
const items = await this.repository.list(projectId)
- return items.map(item => new FormatDTO(item))
+ return items.map((item) => new FormatDTO(item))
}
}
diff --git a/frontend/services/application/download/formatData.ts b/frontend/services/application/download/formatData.ts
index 9294baafd8..f80e35da12 100644
--- a/frontend/services/application/download/formatData.ts
+++ b/frontend/services/application/download/formatData.ts
@@ -1,6 +1,5 @@
import { Format } from '~/domain/models/download/format'
-
export class FormatDTO {
name: string
example: string
diff --git a/frontend/services/application/example/exampleApplicationService.ts b/frontend/services/application/example/exampleApplicationService.ts
index 37cdcf162b..69e8f760fb 100644
--- a/frontend/services/application/example/exampleApplicationService.ts
+++ b/frontend/services/application/example/exampleApplicationService.ts
@@ -4,26 +4,29 @@ import { ExampleRepository, SearchOption } from '~/domain/models/example/example
import { ExampleItem } from '~/domain/models/example/example'
export class ExampleApplicationService {
- constructor(
- private readonly repository: ExampleRepository
- ) {}
+ constructor(private readonly repository: ExampleRepository) {}
public async list(projectId: string, options: SearchOption): Promise<ExampleListDTO> {
try {
const item = await this.repository.list(projectId, options)
return new ExampleListDTO(item)
- } catch(e: any) {
+ } catch (e: any) {
throw new Error(e.response.data.detail)
}
}
- public async fetchOne(projectId: string, page: string, q: string, isChecked: string): Promise<ExampleListDTO> {
+ public async fetchOne(
+ projectId: string,
+ page: string,
+ q: string,
+ isChecked: string
+ ): Promise<ExampleListDTO> {
const offset = (parseInt(page, 10) - 1).toString()
const options: SearchOption = {
limit: '1',
offset,
q,
- isChecked,
+ isChecked
}
return await this.list(projectId, options)
}
@@ -33,7 +36,7 @@ export class ExampleApplicationService {
const doc = this.toModel(item)
const response = await this.repository.create(projectId, doc)
return new ExampleDTO(response)
- } catch(e: any) {
+ } catch (e: any) {
throw new Error(e.response.data.detail)
}
}
@@ -42,13 +45,13 @@ export class ExampleApplicationService {
try {
const doc = this.toModel(item)
await this.repository.update(projectId, doc)
- } catch(e: any) {
+ } catch (e: any) {
throw new Error(e.response.data.detail)
}
}
public bulkDelete(projectId: string, items: ExampleDTO[]): Promise<void> {
- const ids = items.map(item => item.id)
+ const ids = items.map((item) => item.id)
return this.repository.bulkDelete(projectId, ids)
}
diff --git a/frontend/services/application/example/exampleData.ts b/frontend/services/application/example/exampleData.ts
index d368bd05a5..c6aac2e588 100644
--- a/frontend/services/application/example/exampleData.ts
+++ b/frontend/services/application/example/exampleData.ts
@@ -1,17 +1,16 @@
import { ExampleItem, ExampleItemList } from '~/domain/models/example/example'
-
export class ExampleDTO {
- id: number;
- text: string;
- meta: object;
- annotationApprover: boolean | null;
- commentCount: number;
- isApproved: boolean;
- fileUrl: string;
- filename: string;
- url: string;
- isConfirmed: boolean;
+ id: number
+ text: string
+ meta: object
+ annotationApprover: boolean | null
+ commentCount: number
+ isApproved: boolean
+ fileUrl: string
+ filename: string
+ url: string
+ isConfirmed: boolean
constructor(item: ExampleItem) {
this.id = item.id
@@ -29,14 +28,14 @@ export class ExampleDTO {
export class ExampleListDTO {
count: number
- next : string | null
- prev : string | null
+ next: string | null
+ prev: string | null
items: ExampleDTO[]
constructor(item: ExampleItemList) {
this.count = item.count
this.next = item.next
this.prev = item.prev
- this.items = item.items.map(_ => new ExampleDTO(_))
+ this.items = item.items.map((_) => new ExampleDTO(_))
}
}
diff --git a/frontend/services/application/label/labelApplicationService.ts b/frontend/services/application/label/labelApplicationService.ts
index f8a17f1aad..34dec0c99f 100644
--- a/frontend/services/application/label/labelApplicationService.ts
+++ b/frontend/services/application/label/labelApplicationService.ts
@@ -1,17 +1,14 @@
import { LabelDTO } from './labelData'
-import { CreateLabelCommand } from './labelCommand';
+import { CreateLabelCommand } from './labelCommand'
import { LabelRepository } from '~/domain/models/label/labelRepository'
import { LabelItem } from '~/domain/models/label/label'
-
export class LabelApplicationService {
- constructor(
- private readonly repository: LabelRepository
- ) {}
+ constructor(private readonly repository: LabelRepository) {}
public async list(id: string): Promise<LabelDTO[]> {
const items = await this.repository.list(id)
- return items.map(item => new LabelDTO(item))
+ return items.map((item) => new LabelDTO(item))
}
public async findById(projectId: string, labelId: number): Promise<LabelDTO> {
@@ -45,13 +42,13 @@ export class LabelApplicationService {
}
public bulkDelete(projectId: string, items: LabelDTO[]): Promise<void> {
- const ids = items.map(item => item.id)
+ const ids = items.map((item) => item.id)
return this.repository.bulkDelete(projectId, ids)
}
public async export(projectId: string) {
const items = await this.repository.list(projectId)
- const labels = items.map(item => new LabelDTO(item))
+ const labels = items.map((item) => new LabelDTO(item))
const url = window.URL.createObjectURL(new Blob([JSON.stringify(labels, null, 2)]))
const link = document.createElement('a')
link.href = url
diff --git a/frontend/services/application/member/memberApplicationService.ts b/frontend/services/application/member/memberApplicationService.ts
index 5740e96d1c..3ff225d915 100644
--- a/frontend/services/application/member/memberApplicationService.ts
+++ b/frontend/services/application/member/memberApplicationService.ts
@@ -4,15 +4,13 @@ import { MemberRepository } from '~/domain/models/member/memberRepository'
import { MemberItem } from '~/domain/models/member/member'
export class MemberApplicationService {
- constructor(
- private readonly repository: MemberRepository
- ) {}
+ constructor(private readonly repository: MemberRepository) {}
public async list(id: string): Promise<MemberDTO[]> {
try {
const items = await this.repository.list(id)
- return items.map(item => new MemberDTO(item))
- } catch(e: any) {
+ return items.map((item) => new MemberDTO(item))
+ } catch (e: any) {
throw new Error(e.response.data.detail)
}
}
@@ -21,7 +19,7 @@ export class MemberApplicationService {
try {
const member = plainToInstance(MemberItem, item)
await this.repository.create(projectId, member)
- } catch(e: any) {
+ } catch (e: any) {
throw new Error(e.response.data.detail)
}
}
@@ -30,13 +28,13 @@ export class MemberApplicationService {
try {
const member = plainToInstance(MemberItem, item)
await this.repository.update(projectId, member)
- } catch(e: any) {
+ } catch (e: any) {
throw new Error(e.response.data.detail)
}
}
public bulkDelete(projectId: string, items: MemberDTO[]): Promise<void> {
- const ids = items.map(item => item.id)
+ const ids = items.map((item) => item.id)
return this.repository.bulkDelete(projectId, ids)
}
diff --git a/frontend/services/application/member/memberData.ts b/frontend/services/application/member/memberData.ts
index 521085a408..74cf2c9410 100644
--- a/frontend/services/application/member/memberData.ts
+++ b/frontend/services/application/member/memberData.ts
@@ -1,18 +1,17 @@
import { MemberItem } from '~/domain/models/member/member'
-
export class MemberDTO {
- id: number;
- user: number;
- role: number;
- username: string;
- rolename: string;
+ id: number
+ user: number
+ role: number
+ username: string
+ rolename: string
constructor(item: MemberItem) {
- this.id = item.id;
- this.user = item.user;
- this.role = item.role;
- this.username = item.username;
- this.rolename = item.rolename;
+ this.id = item.id
+ this.user = item.user
+ this.role = item.role
+ this.username = item.username
+ this.rolename = item.rolename
}
}
diff --git a/frontend/services/application/metrics/metricsApplicationService.ts b/frontend/services/application/metrics/metricsApplicationService.ts
index fd53078a73..7429714d41 100644
--- a/frontend/services/application/metrics/metricsApplicationService.ts
+++ b/frontend/services/application/metrics/metricsApplicationService.ts
@@ -2,9 +2,7 @@ import { MetricsRepository } from '~/domain/models/metrics/metricsRepository'
import { Progress, Distribution, MyProgress } from '~/domain/models/metrics/metrics'
export class MetricsApplicationService {
- constructor(
- private readonly repository: MetricsRepository
- ) {}
+ constructor(private readonly repository: MetricsRepository) {}
public async fetchMemberProgress(projectId: string): Promise<Progress> {
return await this.repository.fetchMemberProgress(projectId)
diff --git a/frontend/services/application/option/optionApplicationService.ts b/frontend/services/application/option/optionApplicationService.ts
index c536ab4e95..4858f17f4b 100644
--- a/frontend/services/application/option/optionApplicationService.ts
+++ b/frontend/services/application/option/optionApplicationService.ts
@@ -3,9 +3,7 @@ import { OptionRepository } from '~/domain/models/option/optionRepository'
import { OptionItem } from '~/domain/models/option/option'
export class OptionApplicationService {
- constructor(
- private readonly repository: OptionRepository
- ) {}
+ constructor(private readonly repository: OptionRepository) {}
public findOption(projectId: string): OptionDTO {
const item = this.repository.findById(projectId)
diff --git a/frontend/services/application/option/optionData.ts b/frontend/services/application/option/optionData.ts
index f28aff20fe..e295eea91e 100644
--- a/frontend/services/application/option/optionData.ts
+++ b/frontend/services/application/option/optionData.ts
@@ -1,14 +1,13 @@
import { OptionItem } from '~/domain/models/option/option'
-
export class OptionDTO {
- page: number;
- q?: string;
- isChecked?: string;
+ page: number
+ q?: string
+ isChecked?: string
constructor(item: OptionItem) {
- this.page = item.page;
- this.q = item.q;
- this.isChecked = item.isChecked;
+ this.page = item.page
+ this.q = item.q
+ this.isChecked = item.isChecked
}
}
diff --git a/frontend/services/application/project/projectApplicationService.ts b/frontend/services/application/project/projectApplicationService.ts
index 132d51ca9c..51f9c2ef35 100644
--- a/frontend/services/application/project/projectApplicationService.ts
+++ b/frontend/services/application/project/projectApplicationService.ts
@@ -2,17 +2,14 @@ import { ProjectDTO, ProjectWriteDTO, ProjectListDTO } from './projectData'
import { ProjectRepository, SearchOption } from '~/domain/models/project/projectRepository'
import { ProjectWriteItem } from '~/domain/models/project/project'
-
export class ProjectApplicationService {
- constructor(
- private readonly repository: ProjectRepository
- ) {}
+ constructor(private readonly repository: ProjectRepository) {}
public async list(options: SearchOption): Promise<ProjectListDTO> {
try {
const items = await this.repository.list(options)
return new ProjectListDTO(items)
- } catch(e: any) {
+ } catch (e: any) {
throw new Error(e.response.data.detail)
}
}
@@ -27,7 +24,7 @@ export class ProjectApplicationService {
const project = this.toWriteModel(item)
const response = await this.repository.create(project)
return new ProjectDTO(response)
- } catch(e: any) {
+ } catch (e: any) {
throw new Error(e.response.data.detail)
}
}
@@ -37,13 +34,13 @@ export class ProjectApplicationService {
const project = this.toWriteModel(item)
project.tags = []
await this.repository.update(project)
- } catch(e: any) {
+ } catch (e: any) {
throw new Error(e.response.data.detail)
}
}
public bulkDelete(items: ProjectDTO[]): Promise<void> {
- const ids = items.map(item => item.id)
+ const ids = items.map((item) => item.id)
return this.repository.bulkDelete(ids)
}
diff --git a/frontend/services/application/project/projectData.ts b/frontend/services/application/project/projectData.ts
index 1294afea8b..908759b3a5 100644
--- a/frontend/services/application/project/projectData.ts
+++ b/frontend/services/application/project/projectData.ts
@@ -46,18 +46,31 @@ export class ProjectDTO {
}
}
-export type ProjectWriteDTO = Pick<ProjectDTO, 'id' | 'name' | 'description' | 'guideline' | 'projectType' | 'enableRandomOrder' | 'enableShareAnnotation' | 'singleClassClassification' | 'allowOverlapping' | 'graphemeMode' | 'useRelation'> & { tags: string[] }
+export type ProjectWriteDTO = Pick<
+ ProjectDTO,
+ | 'id'
+ | 'name'
+ | 'description'
+ | 'guideline'
+ | 'projectType'
+ | 'enableRandomOrder'
+ | 'enableShareAnnotation'
+ | 'singleClassClassification'
+ | 'allowOverlapping'
+ | 'graphemeMode'
+ | 'useRelation'
+> & { tags: string[] }
export class ProjectListDTO {
count: number
- next : string | null
- prev : string | null
+ next: string | null
+ prev: string | null
items: ProjectDTO[]
constructor(item: ProjectItemList) {
this.count = item.count
this.next = item.next
this.prev = item.prev
- this.items = item.items.map(_ => new ProjectDTO(_))
+ this.items = item.items.map((_) => new ProjectDTO(_))
}
}
diff --git a/frontend/services/application/role/roleApplicationService.ts b/frontend/services/application/role/roleApplicationService.ts
index 04c39e3c82..0f132bf61e 100644
--- a/frontend/services/application/role/roleApplicationService.ts
+++ b/frontend/services/application/role/roleApplicationService.ts
@@ -2,12 +2,10 @@ import { RoleDTO } from './roleData'
import { RoleRepository } from '~/domain/models/role/roleRepository'
export class RoleApplicationService {
- constructor(
- private readonly repository: RoleRepository
- ) {}
+ constructor(private readonly repository: RoleRepository) {}
public async list(): Promise<RoleDTO[]> {
const items = await this.repository.list()
- return items.map(item => new RoleDTO(item))
+ return items.map((item) => new RoleDTO(item))
}
}
diff --git a/frontend/services/application/role/roleData.ts b/frontend/services/application/role/roleData.ts
index a747548fe3..c9c33e09fd 100644
--- a/frontend/services/application/role/roleData.ts
+++ b/frontend/services/application/role/roleData.ts
@@ -1,12 +1,11 @@
import { RoleItem } from '~/domain/models/role/role'
-
export class RoleDTO {
- id: number;
- rolename: string;
+ id: number
+ rolename: string
constructor(item: RoleItem) {
- this.id = item.id;
- this.rolename = item.name;
+ this.id = item.id
+ this.rolename = item.name
}
}
diff --git a/frontend/services/application/tag/tagApplicationService.ts b/frontend/services/application/tag/tagApplicationService.ts
index 5b46b2d2cd..8bce95ce15 100644
--- a/frontend/services/application/tag/tagApplicationService.ts
+++ b/frontend/services/application/tag/tagApplicationService.ts
@@ -1,15 +1,12 @@
import { TagDTO } from './tagData'
import { TagRepository } from '~/domain/models/tag/tagRepository'
-
export class TagApplicationService {
- constructor(
- private readonly repository: TagRepository
- ) {}
+ constructor(private readonly repository: TagRepository) {}
public async list(id: string): Promise<TagDTO[]> {
const items = await this.repository.list(id)
- return items.map(item => new TagDTO(item))
+ return items.map((item) => new TagDTO(item))
}
public create(projectId: string, text: string): void {
diff --git a/frontend/services/application/tasks/annotationApplicationService.ts b/frontend/services/application/tasks/annotationApplicationService.ts
index 4796d69eb1..5cd2bad906 100644
--- a/frontend/services/application/tasks/annotationApplicationService.ts
+++ b/frontend/services/application/tasks/annotationApplicationService.ts
@@ -1,16 +1,13 @@
import { AnnotationRepository } from '~/domain/models/tasks/annotationRepository'
import { AnnotationModel } from '~/domain/models/tasks/interface'
-
export class AnnotationApplicationService<T extends AnnotationModel> {
- constructor(
- readonly repository: AnnotationRepository<T>
- ) {}
+ constructor(readonly repository: AnnotationRepository<T>) {}
public async delete(projectId: string, docId: number, annotationId: number): Promise<void> {
try {
await this.repository.delete(projectId, docId, annotationId)
- } catch(e) {
+ } catch (e) {
console.log(e.response.data.detail)
}
}
diff --git a/frontend/services/application/tasks/seq2seq/seq2seqApplicationService.ts b/frontend/services/application/tasks/seq2seq/seq2seqApplicationService.ts
index b8db47e670..3476eaa0f8 100644
--- a/frontend/services/application/tasks/seq2seq/seq2seqApplicationService.ts
+++ b/frontend/services/application/tasks/seq2seq/seq2seqApplicationService.ts
@@ -4,15 +4,13 @@ import { APISeq2seqRepository } from '~/repositories/tasks/seq2seq/apiSeq2seq'
import { Seq2seqLabel } from '~/domain/models/tasks/seq2seq'
export class Seq2seqApplicationService extends AnnotationApplicationService<Seq2seqLabel> {
- constructor(
- readonly repository: APISeq2seqRepository
- ) {
+ constructor(readonly repository: APISeq2seqRepository) {
super(new APISeq2seqRepository())
}
public async list(projectId: string, docId: number): Promise<Seq2seqDTO[]> {
const items = await this.repository.list(projectId, docId)
- return items.map(item => new Seq2seqDTO(item))
+ return items.map((item) => new Seq2seqDTO(item))
}
public async create(projectId: string, docId: number, text: string): Promise<void> {
@@ -20,7 +18,12 @@ export class Seq2seqApplicationService extends AnnotationApplicationService<Seq2
await this.repository.create(projectId, docId, item)
}
- public async changeText(projectId: string, docId: number, annotationId: number, text: string): Promise<void> {
+ public async changeText(
+ projectId: string,
+ docId: number,
+ annotationId: number,
+ text: string
+ ): Promise<void> {
await this.repository.update(projectId, docId, annotationId, text)
}
}
diff --git a/frontend/services/application/tasks/seq2seq/seq2seqData.ts b/frontend/services/application/tasks/seq2seq/seq2seqData.ts
index ce5c7a8898..4b081082fd 100644
--- a/frontend/services/application/tasks/seq2seq/seq2seqData.ts
+++ b/frontend/services/application/tasks/seq2seq/seq2seqData.ts
@@ -1,14 +1,13 @@
import { Seq2seqLabel } from '~/domain/models/tasks/seq2seq'
-
export class Seq2seqDTO {
- id: number;
- text: string;
- user: number;
+ id: number
+ text: string
+ user: number
constructor(item: Seq2seqLabel) {
- this.id = item.id;
- this.text = item.text;
- this.user = item.user;
+ this.id = item.id
+ this.text = item.text
+ this.user = item.user
}
}
diff --git a/frontend/services/application/tasks/sequenceLabeling/relationData.ts b/frontend/services/application/tasks/sequenceLabeling/relationData.ts
index 66cfab5b7e..c1afe08a82 100644
--- a/frontend/services/application/tasks/sequenceLabeling/relationData.ts
+++ b/frontend/services/application/tasks/sequenceLabeling/relationData.ts
@@ -12,4 +12,4 @@ export class RelationDTO {
this.toId = item.toId
this.labelId = item.type
}
-}
\ No newline at end of file
+}
diff --git a/frontend/services/application/tasks/sequenceLabeling/sequenceLabelingApplicationService.ts b/frontend/services/application/tasks/sequenceLabeling/sequenceLabelingApplicationService.ts
index e6abaf247a..086e00e904 100644
--- a/frontend/services/application/tasks/sequenceLabeling/sequenceLabelingApplicationService.ts
+++ b/frontend/services/application/tasks/sequenceLabeling/sequenceLabelingApplicationService.ts
@@ -3,54 +3,76 @@ import { RelationDTO } from './relationData'
import { SpanDTO } from './sequenceLabelingData'
import { APISequenceLabelingRepository } from '~/repositories/tasks/sequenceLabeling/apiSequenceLabeling'
import { Span } from '~/domain/models/tasks/sequenceLabeling'
-import { RelationRepository } from "~/domain/models/tasks/relationRepository"
-import { RelationItem } from "~/domain/models/tasks/relation"
+import { RelationRepository } from '~/domain/models/tasks/relationRepository'
+import { RelationItem } from '~/domain/models/tasks/relation'
export class SequenceLabelingApplicationService extends AnnotationApplicationService<Span> {
- constructor(
- readonly repository: APISequenceLabelingRepository,
- readonly relationRepository: RelationRepository
- ) {
- super(new APISequenceLabelingRepository())
- }
+ constructor(
+ readonly repository: APISequenceLabelingRepository,
+ readonly relationRepository: RelationRepository
+ ) {
+ super(new APISequenceLabelingRepository())
+ }
- public async list(projectId: string, docId: number): Promise<SpanDTO[]> {
- const items = await this.repository.list(projectId, docId)
- return items.map(item => new SpanDTO(item))
- }
+ public async list(projectId: string, docId: number): Promise<SpanDTO[]> {
+ const items = await this.repository.list(projectId, docId)
+ return items.map((item) => new SpanDTO(item))
+ }
- public async create(projectId: string, docId: number, labelId: number, startOffset: number, endOffset: number): Promise<void> {
- const item = new Span(0, labelId, 0, startOffset, endOffset)
- try {
- await this.repository.create(projectId, docId, item)
- } catch(e: any) {
- console.log(e.response.data.detail)
- }
+ public async create(
+ projectId: string,
+ docId: number,
+ labelId: number,
+ startOffset: number,
+ endOffset: number
+ ): Promise<void> {
+ const item = new Span(0, labelId, 0, startOffset, endOffset)
+ try {
+ await this.repository.create(projectId, docId, item)
+ } catch (e: any) {
+ console.log(e.response.data.detail)
}
+ }
- public async changeLabel(projectId: string, docId: number, annotationId: number, labelId: number): Promise<void> {
- try {
- await this.repository.update(projectId, docId, annotationId, labelId)
- } catch(e: any) {
- console.log(e.response.data.detail)
- }
+ public async changeLabel(
+ projectId: string,
+ docId: number,
+ annotationId: number,
+ labelId: number
+ ): Promise<void> {
+ try {
+ await this.repository.update(projectId, docId, annotationId, labelId)
+ } catch (e: any) {
+ console.log(e.response.data.detail)
}
+ }
- public async listRelations(projectId: string, docId: number): Promise<RelationDTO[]> {
- const items = await this.relationRepository.list(projectId, docId)
- return items.map(item => new RelationDTO(item))
- }
+ public async listRelations(projectId: string, docId: number): Promise<RelationDTO[]> {
+ const items = await this.relationRepository.list(projectId, docId)
+ return items.map((item) => new RelationDTO(item))
+ }
- public async createRelation(projectId: string, docId: number, fromId: number, toId: number, typeId: number): Promise<void> {
- const relation = new RelationItem(0, fromId, toId, typeId)
- await this.relationRepository.create(projectId, docId, relation)
- }
+ public async createRelation(
+ projectId: string,
+ docId: number,
+ fromId: number,
+ toId: number,
+ typeId: number
+ ): Promise<void> {
+ const relation = new RelationItem(0, fromId, toId, typeId)
+ await this.relationRepository.create(projectId, docId, relation)
+ }
- public async deleteRelation(projectId: string, docId: number, relationId: number): Promise<void> {
- await this.relationRepository.delete(projectId, docId, relationId)
- }
+ public async deleteRelation(projectId: string, docId: number, relationId: number): Promise<void> {
+ await this.relationRepository.delete(projectId, docId, relationId)
+ }
- public async updateRelation(projectId: string, docId: number, relationId: number, typeId: number): Promise<void> {
- await this.relationRepository.update(projectId, docId, relationId, typeId)
- }
+ public async updateRelation(
+ projectId: string,
+ docId: number,
+ relationId: number,
+ typeId: number
+ ): Promise<void> {
+ await this.relationRepository.update(projectId, docId, relationId, typeId)
+ }
}
diff --git a/frontend/services/application/tasks/sequenceLabeling/sequenceLabelingData.ts b/frontend/services/application/tasks/sequenceLabeling/sequenceLabelingData.ts
index 3266afc9a0..b5dcc2efdc 100644
--- a/frontend/services/application/tasks/sequenceLabeling/sequenceLabelingData.ts
+++ b/frontend/services/application/tasks/sequenceLabeling/sequenceLabelingData.ts
@@ -1,18 +1,17 @@
import { Span } from '~/domain/models/tasks/sequenceLabeling'
-
export class SpanDTO {
- id: number;
- label: number;
- user: number;
- startOffset: number;
- endOffset: number;
+ id: number
+ label: number
+ user: number
+ startOffset: number
+ endOffset: number
constructor(item: Span) {
- this.id = item.id;
- this.label = item.label;
- this.user = item.user;
- this.startOffset = item.startOffset;
- this.endOffset = item.endOffset;
+ this.id = item.id
+ this.label = item.label
+ this.user = item.user
+ this.startOffset = item.startOffset
+ this.endOffset = item.endOffset
}
}
diff --git a/frontend/services/application/tasks/textClassification/textClassificationApplicationService.ts b/frontend/services/application/tasks/textClassification/textClassificationApplicationService.ts
index 067f53c392..49fb818cc5 100644
--- a/frontend/services/application/tasks/textClassification/textClassificationApplicationService.ts
+++ b/frontend/services/application/tasks/textClassification/textClassificationApplicationService.ts
@@ -1,16 +1,15 @@
import { AnnotationApplicationService } from '../annotationApplicationService'
import { TextClassificationDTO } from './textClassificationData'
-import { TextClassificationItem } from '~/domain/models/tasks/textClassification'
-
-export class TextClassificationApplicationService extends AnnotationApplicationService<TextClassificationItem> {
+import { CategoryItem } from '~/domain/models/tasks/textClassification'
+export class TextClassificationService extends AnnotationApplicationService<CategoryItem> {
public async list(projectId: string, docId: number): Promise<TextClassificationDTO[]> {
const items = await this.repository.list(projectId, docId)
- return items.map(item => new TextClassificationDTO(item))
+ return items.map((item) => new TextClassificationDTO(item))
}
public async create(projectId: string, docId: number, labelId: number): Promise<void> {
- const item = new TextClassificationItem(0, labelId, 0)
+ const item = new CategoryItem(0, labelId, 0)
await this.repository.create(projectId, docId, item)
}
}
diff --git a/frontend/services/application/tasks/textClassification/textClassificationData.ts b/frontend/services/application/tasks/textClassification/textClassificationData.ts
index 6937aa3509..330eaf538a 100644
--- a/frontend/services/application/tasks/textClassification/textClassificationData.ts
+++ b/frontend/services/application/tasks/textClassification/textClassificationData.ts
@@ -1,14 +1,13 @@
-import { TextClassificationItem } from '~/domain/models/tasks/textClassification'
-
+import { CategoryItem } from '~/domain/models/tasks/textClassification'
export class TextClassificationDTO {
- id: number;
- label: number;
- user: number;
+ id: number
+ label: number
+ user: number
- constructor(item: TextClassificationItem) {
- this.id = item.id;
- this.label = item.label;
- this.user = item.user;
+ constructor(item: CategoryItem) {
+ this.id = item.id
+ this.label = item.label
+ this.user = item.user
}
}
diff --git a/frontend/services/application/upload/catalogApplicationService.ts b/frontend/services/application/upload/catalogApplicationService.ts
index 20216319cd..841358d5b8 100644
--- a/frontend/services/application/upload/catalogApplicationService.ts
+++ b/frontend/services/application/upload/catalogApplicationService.ts
@@ -2,12 +2,10 @@ import { CatalogDTO } from './catalogData'
import { CatalogRepository } from '~/domain/models/upload/catalogRepository'
export class CatalogApplicationService {
- constructor(
- private readonly repository: CatalogRepository
- ) {}
+ constructor(private readonly repository: CatalogRepository) {}
public async list(projectId: string): Promise<CatalogDTO[]> {
const items = await this.repository.list(projectId)
- return items.map(item => new CatalogDTO(item))
+ return items.map((item) => new CatalogDTO(item))
}
}
diff --git a/frontend/services/application/upload/catalogData.ts b/frontend/services/application/upload/catalogData.ts
index 781bbc9584..273988d5db 100644
--- a/frontend/services/application/upload/catalogData.ts
+++ b/frontend/services/application/upload/catalogData.ts
@@ -1,6 +1,5 @@
import { Catalog } from '~/domain/models/upload/catalog'
-
export class CatalogDTO {
name: string
example: string
diff --git a/frontend/services/application/upload/parseApplicationService.ts b/frontend/services/application/upload/parseApplicationService.ts
index 888f012da8..2e20c09a60 100644
--- a/frontend/services/application/upload/parseApplicationService.ts
+++ b/frontend/services/application/upload/parseApplicationService.ts
@@ -1,11 +1,15 @@
import { ParseRepository } from '~/domain/models/upload/parseRepository'
export class ParseApplicationService {
- constructor(
- private readonly repository: ParseRepository
- ) {}
+ constructor(private readonly repository: ParseRepository) {}
- public async analyze(projectId: string, format: string, task: string, uploadIds: number[], option: object): Promise<string> {
+ public async analyze(
+ projectId: string,
+ format: string,
+ task: string,
+ uploadIds: number[],
+ option: object
+ ): Promise<string> {
const item = await this.repository.analyze(projectId, format, task, uploadIds, option)
return item
}
diff --git a/frontend/services/application/user/userApplicationService.ts b/frontend/services/application/user/userApplicationService.ts
index c872a77d25..b6312db035 100644
--- a/frontend/services/application/user/userApplicationService.ts
+++ b/frontend/services/application/user/userApplicationService.ts
@@ -2,9 +2,7 @@ import { UserDTO } from './userData'
import { UserRepository } from '~/domain/models/user/userRepository'
export class UserApplicationService {
- constructor(
- private readonly repository: UserRepository
- ) {}
+ constructor(private readonly repository: UserRepository) {}
public async getMyProfile(): Promise<UserDTO> {
const item = await this.repository.getMe()
@@ -13,6 +11,6 @@ export class UserApplicationService {
public async list(query: string): Promise<UserDTO[]> {
const items = await this.repository.list(query)
- return items.map(item => new UserDTO(item))
+ return items.map((item) => new UserDTO(item))
}
}
diff --git a/frontend/services/application/user/userData.ts b/frontend/services/application/user/userData.ts
index 3d0bb5ddfb..36d3a6d639 100644
--- a/frontend/services/application/user/userData.ts
+++ b/frontend/services/application/user/userData.ts
@@ -1,14 +1,13 @@
import { UserItem } from '~/domain/models/user/user'
-
export class UserDTO {
- id: number;
- username: string;
- isStaff: boolean;
+ id: number
+ username: string
+ isStaff: boolean
constructor(item: UserItem) {
- this.id = item.id;
- this.username = item.username;
- this.isStaff = item.isStaff;
+ this.id = item.id
+ this.username = item.username
+ this.isStaff = item.isStaff
}
}
diff --git a/frontend/store/auth.js b/frontend/store/auth.js
index 7f3ab7fc77..a887588147 100644
--- a/frontend/store/auth.js
+++ b/frontend/store/auth.js
@@ -43,7 +43,7 @@ export const actions = {
try {
await this.$services.auth.login(authData.username, authData.password)
commit('setAuthenticated', true)
- } catch(error) {
+ } catch (error) {
throw new Error('The credential is invalid')
}
},
diff --git a/frontend/store/config.js b/frontend/store/config.js
index f38057b26f..04d48b9245 100644
--- a/frontend/store/config.js
+++ b/frontend/store/config.js
@@ -1,5 +1,5 @@
export const state = () => ({
- rtl: false,
+ rtl: false
})
export const mutations = {
@@ -11,11 +11,11 @@ export const mutations = {
export const getters = {
isRTL(state) {
return state.rtl
- },
+ }
}
export const actions = {
toggleRTL({ commit }) {
commit('changeRTLState')
- },
+ }
}
diff --git a/frontend/store/projects.js b/frontend/store/projects.js
index be94e44c60..00e44300a7 100644
--- a/frontend/store/projects.js
+++ b/frontend/store/projects.js
@@ -1,5 +1,5 @@
export const state = () => ({
- current: {},
+ current: {}
})
export const getters = {
@@ -15,7 +15,7 @@ export const getters = {
},
getLink(state) {
return state.current.pageLink
- },
+ }
}
export const mutations = {
@@ -29,7 +29,7 @@ export const actions = {
try {
const response = await this.$services.project.findById(projectId)
commit('setCurrent', response)
- } catch(error) {
+ } catch (error) {
throw new Error(error)
}
}
diff --git a/frontend/test/unit/components/tasks/toolbar/forms/formGuideline.spec.js b/frontend/test/unit/components/tasks/toolbar/forms/formGuideline.spec.js
index 5c6ed4d68f..d784daf299 100644
--- a/frontend/test/unit/components/tasks/toolbar/forms/formGuideline.spec.js
+++ b/frontend/test/unit/components/tasks/toolbar/forms/formGuideline.spec.js
@@ -11,7 +11,7 @@ const factory = () => {
propsData: {
guidelineText: 'Hello'
},
- mocks:{ $t }
+ mocks: { $t }
})
}
diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json
index e48f495a07..c3c0f522f1 100644
--- a/frontend/tsconfig.json
+++ b/frontend/tsconfig.json
@@ -3,11 +3,7 @@
"target": "ES2018",
"module": "ESNext",
"moduleResolution": "Node",
- "lib": [
- "ESNext",
- "ESNext.AsyncIterable",
- "DOM"
- ],
+ "lib": ["ESNext", "ESNext.AsyncIterable", "DOM"],
"esModuleInterop": true,
"allowJs": true,
"sourceMap": true,
@@ -17,20 +13,10 @@
"strictPropertyInitialization": false,
"baseUrl": ".",
"paths": {
- "~/*": [
- "./*"
- ],
- "@/*": [
- "./*"
- ]
+ "~/*": ["./*"],
+ "@/*": ["./*"]
},
- "types": [
- "@types/node",
- "@nuxt/types",
- "nuxt-i18n"
- ]
+ "types": ["@types/node", "@nuxt/types", "nuxt-i18n"]
},
- "exclude": [
- "node_modules"
- ]
+ "exclude": ["node_modules"]
}
diff --git a/frontend/vue-shim.d.ts b/frontend/vue-shim.d.ts
index a456e142ad..6de952b69c 100644
--- a/frontend/vue-shim.d.ts
+++ b/frontend/vue-shim.d.ts
@@ -1,5 +1,5 @@
-declare module "*.vue" {
+declare module '*.vue' {
import Vue from 'vue'
export default Vue
}
-declare module "v-annotator"
\ No newline at end of file
+declare module 'v-annotator'
|
typeddjango__django-stubs-939 | Support mypy 0.950
mypy 0.950 was released a short while ago (https://github.com/python/mypy/releases/tag/v0.950).
`django-stubs` currently enforces a version less than 0.950 (https://github.com/typeddjango/django-stubs/blob/master/setup.py#L23), please enable support for 0.950.
| [
{
"content": "import os\nfrom typing import List\n\nfrom setuptools import find_packages, setup\n\n\ndef find_stub_files(name: str) -> List[str]:\n result = []\n for root, _dirs, files in os.walk(name):\n for file in files:\n if file.endswith(\".pyi\"):\n if os.path.sep in... | [
{
"content": "import os\nfrom typing import List\n\nfrom setuptools import find_packages, setup\n\n\ndef find_stub_files(name: str) -> List[str]:\n result = []\n for root, _dirs, files in os.walk(name):\n for file in files:\n if file.endswith(\".pyi\"):\n if os.path.sep in... | diff --git a/requirements.txt b/requirements.txt
index a96bbd456..ab2789184 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -10,4 +10,4 @@ psycopg2-binary
-e .
# Overrides:
-mypy==0.942
+mypy==0.950
diff --git a/setup.py b/setup.py
index c4987d135..18098e28c 100644
--- a/setup.py
+++ b/setup.py
@@ -20,7 +20,7 @@ def find_stub_files(name: str) -> List[str]:
readme = f.read()
dependencies = [
- "mypy>=0.930,<0.950",
+ "mypy>=0.930,<0.960",
"django",
"django-stubs-ext>=0.4.0",
"tomli",
diff --git a/tests/typecheck/db/test_connection.yml b/tests/typecheck/db/test_connection.yml
index 4f3134171..6b3f88bf1 100644
--- a/tests/typecheck/db/test_connection.yml
+++ b/tests/typecheck/db/test_connection.yml
@@ -7,7 +7,7 @@
- case: raw_connections
main: |
from django.db import connections
- reveal_type(connections["test"]) # N: Revealed type is "django.db.backends.base.base.BaseDatabaseWrapper*"
+ reveal_type(connections["test"]) # N: Revealed type is "django.db.backends.base.base.BaseDatabaseWrapper"
for connection in connections.all():
with connection.cursor() as cursor:
reveal_type(cursor) # N: Revealed type is "django.db.backends.utils.CursorWrapper"
diff --git a/tests/typecheck/fields/test_base.yml b/tests/typecheck/fields/test_base.yml
index f9c671fa7..714916213 100644
--- a/tests/typecheck/fields/test_base.yml
+++ b/tests/typecheck/fields/test_base.yml
@@ -2,11 +2,11 @@
main: |
from myapp.models import User
user = User(small_int=1, name='user', slug='user', text='user')
- reveal_type(user.id) # N: Revealed type is "builtins.int*"
- reveal_type(user.small_int) # N: Revealed type is "builtins.int*"
- reveal_type(user.name) # N: Revealed type is "builtins.str*"
- reveal_type(user.slug) # N: Revealed type is "builtins.str*"
- reveal_type(user.text) # N: Revealed type is "builtins.str*"
+ reveal_type(user.id) # N: Revealed type is "builtins.int"
+ reveal_type(user.small_int) # N: Revealed type is "builtins.int"
+ reveal_type(user.name) # N: Revealed type is "builtins.str"
+ reveal_type(user.slug) # N: Revealed type is "builtins.str"
+ reveal_type(user.text) # N: Revealed type is "builtins.str"
installed_apps:
- myapp
files:
@@ -25,9 +25,9 @@
main: |
from myapp.models import Booking
booking = Booking()
- reveal_type(booking.id) # N: Revealed type is "builtins.int*"
+ reveal_type(booking.id) # N: Revealed type is "builtins.int"
reveal_type(booking.time_range) # N: Revealed type is "Any"
- reveal_type(booking.some_decimal) # N: Revealed type is "decimal.Decimal*"
+ reveal_type(booking.some_decimal) # N: Revealed type is "decimal.Decimal"
installed_apps:
- myapp
files:
@@ -47,7 +47,7 @@
disable_cache: true
main: |
from myapp.models import User
- reveal_type(User().id) # N: Revealed type is "builtins.int*"
+ reveal_type(User().id) # N: Revealed type is "builtins.int"
installed_apps:
- myapp
files:
@@ -62,7 +62,7 @@
disable_cache: true
main: |
from myapp.models import User
- reveal_type(User().my_pk) # N: Revealed type is "builtins.int*"
+ reveal_type(User().my_pk) # N: Revealed type is "builtins.int"
User().id # E: "User" has no attribute "id"
installed_apps:
- myapp
@@ -97,7 +97,7 @@
MyModel(notnulltext=None) # E: Incompatible type for "notnulltext" of "MyModel" (got "None", expected "Union[str, int, Combinable]")
MyModel(notnulltext="")
MyModel().notnulltext = None # E: Incompatible types in assignment (expression has type "None", variable has type "Union[str, int, Combinable]")
- reveal_type(MyModel().notnulltext) # N: Revealed type is "builtins.str*"
+ reveal_type(MyModel().notnulltext) # N: Revealed type is "builtins.str"
installed_apps:
- myapp
files:
@@ -133,7 +133,7 @@
- case: fields_inside_mixins_used_in_model_subclasses_resolved_as_primitives
main: |
from myapp.models import MyModel, AuthMixin
- reveal_type(MyModel().username) # N: Revealed type is "builtins.str*"
+ reveal_type(MyModel().username) # N: Revealed type is "builtins.str"
installed_apps:
- myapp
files:
@@ -156,10 +156,10 @@
class Book(models.Model):
published = cast(models.Field[Year, Year], models.IntegerField())
book = Book()
- reveal_type(book.published) # N: Revealed type is "main.Year*"
+ reveal_type(book.published) # N: Revealed type is "main.Year"
book.published = 2006 # E: Incompatible types in assignment (expression has type "int", variable has type "Year")
book.published = Year(2006)
- reveal_type(book.published) # N: Revealed type is "main.Year*"
+ reveal_type(book.published) # N: Revealed type is "main.Year"
def accepts_int(arg: int) -> None: ...
accepts_int(book.published)
@@ -179,4 +179,4 @@
small = models.SmallAutoField(primary_key=True)
obj = MyModel()
- reveal_type(obj.small) # N: Revealed type is "builtins.int*"
+ reveal_type(obj.small) # N: Revealed type is "builtins.int"
diff --git a/tests/typecheck/fields/test_nullable.yml b/tests/typecheck/fields/test_nullable.yml
index f3ddaaa91..16ccc3381 100644
--- a/tests/typecheck/fields/test_nullable.yml
+++ b/tests/typecheck/fields/test_nullable.yml
@@ -34,7 +34,7 @@
- case: nullable_field_with_strict_optional_true
main: |
from myapp.models import MyModel
- reveal_type(MyModel().text) # N: Revealed type is "builtins.str*"
+ reveal_type(MyModel().text) # N: Revealed type is "builtins.str"
reveal_type(MyModel().text_nullable) # N: Revealed type is "Union[builtins.str, None]"
MyModel().text = None # E: Incompatible types in assignment (expression has type "None", variable has type "Union[str, int, Combinable]")
MyModel().text_nullable = None
diff --git a/tests/typecheck/fields/test_postgres_fields.yml b/tests/typecheck/fields/test_postgres_fields.yml
index 7d94fc04b..ebaf4e1a2 100644
--- a/tests/typecheck/fields/test_postgres_fields.yml
+++ b/tests/typecheck/fields/test_postgres_fields.yml
@@ -2,7 +2,7 @@
main: |
from myapp.models import User
user = User(array=[])
- reveal_type(user.array) # N: Revealed type is "builtins.list*[Any]"
+ reveal_type(user.array) # N: Revealed type is "builtins.list[Any]"
installed_apps:
- myapp
files:
@@ -19,8 +19,8 @@
main: |
from myapp.models import User
user = User()
- reveal_type(user.members) # N: Revealed type is "builtins.list*[builtins.int]"
- reveal_type(user.members_as_text) # N: Revealed type is "builtins.list*[builtins.str]"
+ reveal_type(user.members) # N: Revealed type is "builtins.list[builtins.int]"
+ reveal_type(user.members_as_text) # N: Revealed type is "builtins.list[builtins.str]"
installed_apps:
- myapp
files:
diff --git a/tests/typecheck/fields/test_related.yml b/tests/typecheck/fields/test_related.yml
index 439820c5c..5f5cac65f 100644
--- a/tests/typecheck/fields/test_related.yml
+++ b/tests/typecheck/fields/test_related.yml
@@ -2,7 +2,7 @@
main: |
from myapp.models import Book, Publisher
book = Book()
- reveal_type(book.publisher) # N: Revealed type is "myapp.models.Publisher*"
+ reveal_type(book.publisher) # N: Revealed type is "myapp.models.Publisher"
publisher = Publisher()
reveal_type(publisher.books) # N: Revealed type is "django.db.models.manager.RelatedManager[myapp.models.Book]"
installed_apps:
@@ -22,8 +22,8 @@
main: |
from myapp.models import Book
book = Book()
- reveal_type(book.publisher_id) # N: Revealed type is "builtins.int*"
- reveal_type(book.owner_id) # N: Revealed type is "builtins.int*"
+ reveal_type(book.publisher_id) # N: Revealed type is "builtins.int"
+ reveal_type(book.owner_id) # N: Revealed type is "builtins.int"
installed_apps:
- django.contrib.auth
- myapp
@@ -42,8 +42,8 @@
main: |
from myapp.models import Book, Publisher
book = Book()
- reveal_type(book.publisher) # N: Revealed type is "myapp.models.Publisher*"
- reveal_type(book.publisher2) # N: Revealed type is "myapp.models.Publisher*"
+ reveal_type(book.publisher) # N: Revealed type is "myapp.models.Publisher"
+ reveal_type(book.publisher2) # N: Revealed type is "myapp.models.Publisher"
publisher = Publisher()
reveal_type(publisher.books) # N: Revealed type is "django.db.models.manager.RelatedManager[myapp.models.Book]"
@@ -66,7 +66,7 @@
main: |
from myapp2.models import Book
book = Book()
- reveal_type(book.publisher) # N: Revealed type is "myapp.models.Publisher*"
+ reveal_type(book.publisher) # N: Revealed type is "myapp.models.Publisher"
installed_apps:
- myapp
- myapp2
@@ -88,7 +88,7 @@
main: |
from myapp.models import User, Profile
reveal_type(User().profile) # N: Revealed type is "myapp.models.Profile"
- reveal_type(Profile().user) # N: Revealed type is "myapp.models.User*"
+ reveal_type(Profile().user) # N: Revealed type is "myapp.models.User"
installed_apps:
- myapp
files:
@@ -197,7 +197,7 @@
- case: models_imported_inside_init_file_one_to_one_field
main: |
from myapp2.models import Profile
- reveal_type(Profile().user) # N: Revealed type is "myapp.models.user.User*"
+ reveal_type(Profile().user) # N: Revealed type is "myapp.models.user.User"
reveal_type(Profile().user.profile) # N: Revealed type is "myapp2.models.Profile"
installed_apps:
- myapp
@@ -223,7 +223,7 @@
- case: models_triple_circular_reference
main: |
from myapp.models import App
- reveal_type(App().owner) # N: Revealed type is "myapp.models.user.User*"
+ reveal_type(App().owner) # N: Revealed type is "myapp.models.user.User"
reveal_type(App().owner.profile) # N: Revealed type is "myapp.models.profile.Profile"
installed_apps:
- myapp
@@ -253,7 +253,7 @@
- case: many_to_many_field_converts_to_queryset_of_model_type
main: |
from myapp.models import App, Member
- reveal_type(Member().apps) # N: Revealed type is "django.db.models.manager.RelatedManager*[myapp.models.App]"
+ reveal_type(Member().apps) # N: Revealed type is "django.db.models.manager.RelatedManager[myapp.models.App]"
reveal_type(App().members) # N: Revealed type is "django.db.models.manager.RelatedManager[myapp.models.Member]"
installed_apps:
- myapp
@@ -270,7 +270,7 @@
- case: many_to_many_works_with_string_if_imported
main: |
from myapp.models import Member
- reveal_type(Member().apps) # N: Revealed type is "django.db.models.manager.RelatedManager*[myapp2.models.App]"
+ reveal_type(Member().apps) # N: Revealed type is "django.db.models.manager.RelatedManager[myapp2.models.App]"
installed_apps:
- myapp
- myapp2
@@ -291,7 +291,7 @@
- case: foreign_key_with_self
main: |
from myapp.models import User
- reveal_type(User().parent) # N: Revealed type is "myapp.models.User*"
+ reveal_type(User().parent) # N: Revealed type is "myapp.models.User"
installed_apps:
- myapp
files:
@@ -305,7 +305,7 @@
- case: many_to_many_with_self
main: |
from myapp.models import User
- reveal_type(User().friends) # N: Revealed type is "django.db.models.manager.RelatedManager*[myapp.models.User]"
+ reveal_type(User().friends) # N: Revealed type is "django.db.models.manager.RelatedManager[myapp.models.User]"
installed_apps:
- myapp
files:
@@ -354,14 +354,14 @@
import datetime
from myapp.models import Book, Book2
- reveal_type(Book().publisher_id) # N: Revealed type is "builtins.str*"
+ reveal_type(Book().publisher_id) # N: Revealed type is "builtins.str"
Book(publisher_id=1)
Book(publisher_id='hello')
Book(publisher_id=datetime.datetime.now()) # E: Incompatible type for "publisher_id" of "Book" (got "datetime", expected "Union[str, int, Combinable]")
Book.objects.create(publisher_id=1)
Book.objects.create(publisher_id='hello')
- reveal_type(Book2().publisher_id) # N: Revealed type is "builtins.int*"
+ reveal_type(Book2().publisher_id) # N: Revealed type is "builtins.int"
Book2(publisher_id=1)
Book2(publisher_id=[]) # E: Incompatible type for "publisher_id" of "Book2" (got "List[Any]", expected "Union[float, int, str, Combinable]")
Book2.objects.create(publisher_id=1)
@@ -387,7 +387,7 @@
- case: if_model_is_defined_as_name_of_the_class_look_for_it_in_the_same_app
main: |
from myapp.models import Book
- reveal_type(Book().publisher) # N: Revealed type is "myapp.models.publisher.Publisher*"
+ reveal_type(Book().publisher) # N: Revealed type is "myapp.models.publisher.Publisher"
installed_apps:
- myapp
files:
@@ -434,7 +434,7 @@
main: |
from myapp.models import Book, Publisher
book = Book()
- reveal_type(book.publisher) # N: Revealed type is "myapp.models.Publisher*"
+ reveal_type(book.publisher) # N: Revealed type is "myapp.models.Publisher"
publisher = Publisher()
reveal_type(publisher.books)
@@ -461,7 +461,7 @@
main: |
from myapp.models import Book
book = Book()
- reveal_type(book.publisher) # N: Revealed type is "myapp.models.Publisher*"
+ reveal_type(book.publisher) # N: Revealed type is "myapp.models.Publisher"
custom_settings: |
INSTALLED_APPS = ('django.contrib.contenttypes', 'myapp')
BOOK_RELATED_MODEL = 'myapp.Publisher'
@@ -481,7 +481,7 @@
- case: foreign_key_with_custom_app_name
main: |
from myapp.models import MyMain
- reveal_type(MyMain().user) # N: Revealed type is "myapp2.models.MyUser*"
+ reveal_type(MyMain().user) # N: Revealed type is "myapp2.models.MyUser"
installed_apps:
- myapp
- myapp2.apps.MyApp2Config
@@ -509,7 +509,7 @@
- case: related_field_to_extracted_from_function
main: |
from myapp.models import Profile
- reveal_type(Profile().user) # N: Revealed type is "myapp.models.User*"
+ reveal_type(Profile().user) # N: Revealed type is "myapp.models.User"
installed_apps:
- myapp
files:
@@ -576,8 +576,8 @@
- case: test_foreign_key_from_superclass_inherits_correctly
main: |
from myapp.models import MyUser, Book, Article, LibraryEntity
- reveal_type(Book().registered_by_user) # N: Revealed type is "myapp.models.MyUser*"
- reveal_type(Article().registered_by_user) # N: Revealed type is "myapp.models.MyUser*"
+ reveal_type(Book().registered_by_user) # N: Revealed type is "myapp.models.MyUser"
+ reveal_type(Article().registered_by_user) # N: Revealed type is "myapp.models.MyUser"
user = MyUser()
reveal_type(user.book_set) # N: Revealed type is "django.db.models.manager.RelatedManager[myapp.models.Book]"
@@ -604,16 +604,16 @@
- case: test_foreign_key_from_superclass_inherits_correctly_when_also_inheriting_manager
main: |
from myapp.models import MyUser, Book, Article, LibraryEntity
- reveal_type(Book().registered_by_user) # N: Revealed type is "myapp.models.MyUser*"
- reveal_type(Article().registered_by_user) # N: Revealed type is "myapp.models.MyUser*"
+ reveal_type(Book().registered_by_user) # N: Revealed type is "myapp.models.MyUser"
+ reveal_type(Article().registered_by_user) # N: Revealed type is "myapp.models.MyUser"
user = MyUser()
reveal_type(user.book_set) # N: Revealed type is "myapp.models.Book_RelatedManager"
reveal_type(user.article_set) # N: Revealed type is "myapp.models.Article_RelatedManager"
- reveal_type(user.book_set.add) # N: Revealed type is "def (*objs: Union[myapp.models.Book*, builtins.int], *, bulk: builtins.bool =)"
- reveal_type(user.article_set.add) # N: Revealed type is "def (*objs: Union[myapp.models.Article*, builtins.int], *, bulk: builtins.bool =)"
- reveal_type(user.book_set.filter) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> myapp.models.LibraryEntityQuerySet[myapp.models.Book*]"
- reveal_type(user.article_set.filter) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> myapp.models.LibraryEntityQuerySet[myapp.models.Article*]"
+ reveal_type(user.book_set.add) # N: Revealed type is "def (*objs: Union[myapp.models.Book, builtins.int], *, bulk: builtins.bool =)"
+ reveal_type(user.article_set.add) # N: Revealed type is "def (*objs: Union[myapp.models.Article, builtins.int], *, bulk: builtins.bool =)"
+ reveal_type(user.book_set.filter) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> myapp.models.LibraryEntityQuerySet[myapp.models.Book]"
+ reveal_type(user.article_set.filter) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> myapp.models.LibraryEntityQuerySet[myapp.models.Article]"
reveal_type(user.book_set.queryset_method()) # N: Revealed type is "builtins.int"
reveal_type(user.article_set.queryset_method()) # N: Revealed type is "builtins.int"
installed_apps:
@@ -665,8 +665,8 @@
- case: resolve_primary_keys_for_foreign_keys_with_abstract_self_model
main: |
from myapp.models import User
- reveal_type(User().parent) # N: Revealed type is "myapp.models.User*"
- reveal_type(User().parent_id) # N: Revealed type is "builtins.int*"
+ reveal_type(User().parent) # N: Revealed type is "myapp.models.User"
+ reveal_type(User().parent_id) # N: Revealed type is "builtins.int"
reveal_type(User().parent2) # N: Revealed type is "Union[myapp.models.User, None]"
reveal_type(User().parent2_id) # N: Revealed type is "Union[builtins.int, None]"
@@ -690,11 +690,11 @@
main: |
from myapp.models import User, Order, Product
reveal_type(User().orders) # N: Revealed type is "myapp.models.Order_RelatedManager"
- reveal_type(User().orders.get()) # N: Revealed type is "myapp.models.Order*"
+ reveal_type(User().orders.get()) # N: Revealed type is "myapp.models.Order"
reveal_type(User().orders.manager_method()) # N: Revealed type is "builtins.int"
reveal_type(Product.objects.queryset_method()) # N: Revealed type is "builtins.int"
reveal_type(Order().products) # N: Revealed type is "myapp.models.Product_RelatedManager"
- reveal_type(Order().products.get()) # N: Revealed type is "myapp.models.Product*"
+ reveal_type(Order().products.get()) # N: Revealed type is "myapp.models.Product"
reveal_type(Order().products.queryset_method()) # N: Revealed type is "builtins.int"
if 1 == 2:
manager = User().products
@@ -731,11 +731,11 @@
from myapp.models.user import User
reveal_type(Store().purchases) # N: Revealed type is "myapp.models.purchase.Purchase_RelatedManager"
reveal_type(Store().purchases.queryset_method()) # N: Revealed type is "myapp.models.querysets.PurchaseQuerySet"
- reveal_type(Store().purchases.filter()) # N: Revealed type is "myapp.models.querysets.PurchaseQuerySet[myapp.models.purchase.Purchase*]"
+ reveal_type(Store().purchases.filter()) # N: Revealed type is "myapp.models.querysets.PurchaseQuerySet[myapp.models.purchase.Purchase]"
reveal_type(Store().purchases.filter().queryset_method()) # N: Revealed type is "myapp.models.querysets.PurchaseQuerySet"
reveal_type(User().purchases) # N: Revealed type is "myapp.models.purchase.Purchase_RelatedManager"
reveal_type(User().purchases.queryset_method()) # N: Revealed type is "myapp.models.querysets.PurchaseQuerySet"
- reveal_type(User().purchases.filter()) # N: Revealed type is "myapp.models.querysets.PurchaseQuerySet[myapp.models.purchase.Purchase*]"
+ reveal_type(User().purchases.filter()) # N: Revealed type is "myapp.models.querysets.PurchaseQuerySet[myapp.models.purchase.Purchase]"
reveal_type(User().purchases.filter().queryset_method()) # N: Revealed type is "myapp.models.querysets.PurchaseQuerySet"
installed_apps:
- myapp
diff --git a/tests/typecheck/managers/querysets/test_annotate.yml b/tests/typecheck/managers/querysets/test_annotate.yml
index ccfecf63a..adaafd975 100644
--- a/tests/typecheck/managers/querysets/test_annotate.yml
+++ b/tests/typecheck/managers/querysets/test_annotate.yml
@@ -122,10 +122,10 @@
reveal_type(qs) # N: Revealed type is "django.db.models.query._QuerySet[django_stubs_ext.WithAnnotations[myapp__models__User, TypedDict({'foo': Any})], django_stubs_ext.WithAnnotations[myapp__models__User, TypedDict({'foo': Any})]]"
annotated = qs.get()
- reveal_type(annotated) # N: Revealed type is "django_stubs_ext.WithAnnotations[myapp__models__User, TypedDict({'foo': Any})]*"
+ reveal_type(annotated) # N: Revealed type is "django_stubs_ext.WithAnnotations[myapp__models__User, TypedDict({'foo': Any})]"
reveal_type(annotated.foo) # N: Revealed type is "Any"
print(annotated.bar) # E: "WithAnnotations[myapp__models__User, TypedDict({'foo': Any})]" has no attribute "bar"
- reveal_type(annotated.username) # N: Revealed type is "builtins.str*"
+ reveal_type(annotated.username) # N: Revealed type is "builtins.str"
installed_apps:
- myapp
@@ -196,10 +196,10 @@
qs = User.objects.annotate(foo=F('id'))
qs = qs.annotate(bar=F('id'))
annotated = qs.get()
- reveal_type(annotated) # N: Revealed type is "django_stubs_ext.WithAnnotations[myapp__models__User, TypedDict({'foo': Any, 'bar': Any})]*"
+ reveal_type(annotated) # N: Revealed type is "django_stubs_ext.WithAnnotations[myapp__models__User, TypedDict({'foo': Any, 'bar': Any})]"
reveal_type(annotated.foo) # N: Revealed type is "Any"
reveal_type(annotated.bar) # N: Revealed type is "Any"
- reveal_type(annotated.username) # N: Revealed type is "builtins.str*"
+ reveal_type(annotated.username) # N: Revealed type is "builtins.str"
installed_apps:
- myapp
files:
@@ -280,19 +280,19 @@
values_list_flat_known = Blog.objects.annotate(foo=F('id')).values_list('text', flat=True).get()
# Even though it's annotated, we still know the lookup's type.
- reveal_type(values_list_flat_known) # N: Revealed type is "builtins.str*"
+ reveal_type(values_list_flat_known) # N: Revealed type is "builtins.str"
values_list_flat_unknown = Blog.objects.annotate(foo=F('id')).values_list('foo', flat=True).get()
# We don't know the type of an unknown lookup
reveal_type(values_list_flat_unknown) # N: Revealed type is "Any"
values_no_params = Blog.objects.annotate(foo=F('id')).values().get()
- reveal_type(values_no_params) # N: Revealed type is "builtins.dict*[builtins.str, Any]"
+ reveal_type(values_no_params) # N: Revealed type is "builtins.dict[builtins.str, Any]"
values_list_no_params = Blog.objects.annotate(foo=F('id')).values_list().get()
- reveal_type(values_list_no_params) # N: Revealed type is "builtins.tuple*[Any, ...]"
+ reveal_type(values_list_no_params) # N: Revealed type is "builtins.tuple[Any, ...]"
values_list_flat_no_params = Blog.objects.annotate(foo=F('id')).values_list(flat=True).get()
- reveal_type(values_list_flat_no_params) # N: Revealed type is "builtins.int*"
+ reveal_type(values_list_flat_no_params) # N: Revealed type is "builtins.int"
values_list_named_no_params = Blog.objects.annotate(foo=F('id')).values_list(named=True).get()
reveal_type(values_list_named_no_params.foo) # N: Revealed type is "Any"
@@ -324,13 +324,13 @@
before_values_no_params = Blog.objects.values().annotate(foo=F('id')).get()
- reveal_type(before_values_no_params) # N: Revealed type is "builtins.dict*[builtins.str, Any]"
+ reveal_type(before_values_no_params) # N: Revealed type is "builtins.dict[builtins.str, Any]"
before_values_list_no_params = Blog.objects.values_list().annotate(foo=F('id')).get()
- reveal_type(before_values_list_no_params) # N: Revealed type is "builtins.tuple*[Any, ...]"
+ reveal_type(before_values_list_no_params) # N: Revealed type is "builtins.tuple[Any, ...]"
before_values_list_flat_no_params = Blog.objects.values_list(flat=True).annotate(foo=F('id')).get()
- reveal_type(before_values_list_flat_no_params) # N: Revealed type is "builtins.int*"
+ reveal_type(before_values_list_flat_no_params) # N: Revealed type is "builtins.int"
before_values_list_named_no_params = Blog.objects.values_list(named=True).annotate(foo=F('id')).get()
reveal_type(before_values_list_named_no_params.foo) # N: Revealed type is "Any"
diff --git a/tests/typecheck/managers/querysets/test_basic_methods.yml b/tests/typecheck/managers/querysets/test_basic_methods.yml
index 301a07a05..c008415e2 100644
--- a/tests/typecheck/managers/querysets/test_basic_methods.yml
+++ b/tests/typecheck/managers/querysets/test_basic_methods.yml
@@ -4,24 +4,24 @@
from myapp.models import Blog
qs = Blog.objects.all()
- reveal_type(qs) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.Blog*, myapp.models.Blog*]"
- reveal_type(qs.get(id=1)) # N: Revealed type is "myapp.models.Blog*"
- reveal_type(iter(qs)) # N: Revealed type is "typing.Iterator*[myapp.models.Blog*]"
- reveal_type(qs.iterator()) # N: Revealed type is "typing.Iterator[myapp.models.Blog*]"
- reveal_type(qs.first()) # N: Revealed type is "Union[myapp.models.Blog*, None]"
- reveal_type(qs.earliest()) # N: Revealed type is "myapp.models.Blog*"
- reveal_type(qs[0]) # N: Revealed type is "myapp.models.Blog*"
+ reveal_type(qs) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.Blog, myapp.models.Blog]"
+ reveal_type(qs.get(id=1)) # N: Revealed type is "myapp.models.Blog"
+ reveal_type(iter(qs)) # N: Revealed type is "typing.Iterator[myapp.models.Blog]"
+ reveal_type(qs.iterator()) # N: Revealed type is "typing.Iterator[myapp.models.Blog]"
+ reveal_type(qs.first()) # N: Revealed type is "Union[myapp.models.Blog, None]"
+ reveal_type(qs.earliest()) # N: Revealed type is "myapp.models.Blog"
+ reveal_type(qs[0]) # N: Revealed type is "myapp.models.Blog"
reveal_type(qs[:9]) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.Blog, myapp.models.Blog]"
- reveal_type(qs.create()) # N: Revealed type is "myapp.models.Blog*"
- reveal_type(qs.get_or_create()) # N: Revealed type is "Tuple[myapp.models.Blog*, builtins.bool]"
+ reveal_type(qs.create()) # N: Revealed type is "myapp.models.Blog"
+ reveal_type(qs.get_or_create()) # N: Revealed type is "Tuple[myapp.models.Blog, builtins.bool]"
reveal_type(qs.exists()) # N: Revealed type is "builtins.bool"
reveal_type(qs.none()) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.Blog, myapp.models.Blog]"
- reveal_type(qs.update_or_create()) # N: Revealed type is "Tuple[myapp.models.Blog*, builtins.bool]"
+ reveal_type(qs.update_or_create()) # N: Revealed type is "Tuple[myapp.models.Blog, builtins.bool]"
reveal_type(qs.explain()) # N: Revealed type is "builtins.str"
reveal_type(qs.raw(qs.explain())) # N: Revealed type is "django.db.models.query.RawQuerySet[Any]"
# .dates / .datetimes
- reveal_type(Blog.objects.dates("created_at", "day")) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.Blog*, datetime.date]"
- reveal_type(Blog.objects.datetimes("created_at", "day")) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.Blog*, datetime.datetime]"
+ reveal_type(Blog.objects.dates("created_at", "day")) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.Blog, datetime.date]"
+ reveal_type(Blog.objects.datetimes("created_at", "day")) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.Blog, datetime.datetime]"
# AND-ing QuerySets
reveal_type(Blog.objects.all() & Blog.objects.all()) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.Blog, myapp.models.Blog]"
@@ -29,9 +29,9 @@
# bulk methods
reveal_type(qs.count()) # N: Revealed type is "builtins.int"
reveal_type(qs.update(created_at=timezone.now())) # N: Revealed type is "builtins.int"
- reveal_type(qs.in_bulk()) # N: Revealed type is "builtins.dict[Any, myapp.models.Blog*]"
+ reveal_type(qs.in_bulk()) # N: Revealed type is "builtins.dict[Any, myapp.models.Blog]"
reveal_type(qs.bulk_update(list(qs), fields=["created_at"])) # N: Revealed type is "builtins.int"
- reveal_type(qs.bulk_create([])) # N: Revealed type is "builtins.list[myapp.models.Blog*]"
+ reveal_type(qs.bulk_create([])) # N: Revealed type is "builtins.list[myapp.models.Blog]"
reveal_type(qs.delete()) # N: Revealed type is "Tuple[builtins.int, builtins.dict[builtins.str, builtins.int]]"
installed_apps:
- myapp
diff --git a/tests/typecheck/managers/querysets/test_from_queryset.yml b/tests/typecheck/managers/querysets/test_from_queryset.yml
index 07f921569..caa8249d4 100644
--- a/tests/typecheck/managers/querysets/test_from_queryset.yml
+++ b/tests/typecheck/managers/querysets/test_from_queryset.yml
@@ -2,10 +2,10 @@
main: |
from myapp.models import MyModel
reveal_type(MyModel().objects) # N: Revealed type is "myapp.models.NewManager[myapp.models.MyModel]"
- reveal_type(MyModel().objects.get()) # N: Revealed type is "myapp.models.MyModel*"
+ reveal_type(MyModel().objects.get()) # N: Revealed type is "myapp.models.MyModel"
reveal_type(MyModel().objects.queryset_method()) # N: Revealed type is "builtins.str"
reveal_type(MyModel.objects.filter(id=1).queryset_method()) # N: Revealed type is "builtins.str"
- reveal_type(MyModel.objects.filter(id=1)) # N: Revealed type is "myapp.models.ModelQuerySet[myapp.models.MyModel*]"
+ reveal_type(MyModel.objects.filter(id=1)) # N: Revealed type is "myapp.models.ModelQuerySet[myapp.models.MyModel]"
installed_apps:
- myapp
files:
@@ -27,13 +27,13 @@
from myapp.models import MyModel
reveal_type(MyModel.objects) # N: Revealed type is "myapp.models.NewManager[myapp.models.MyModel]"
reveal_type(MyModel.objects) # N: Revealed type is "myapp.models.NewManager[myapp.models.MyModel]"
- reveal_type(MyModel.objects.get()) # N: Revealed type is "myapp.models.MyModel*"
+ reveal_type(MyModel.objects.get()) # N: Revealed type is "myapp.models.MyModel"
reveal_type(MyModel.objects.queryset_method()) # N: Revealed type is "myapp.querysets.ModelQuerySet"
reveal_type(MyModel.objects.queryset_method_2()) # N: Revealed type is "typing.Iterable[myapp.querysets.Custom]"
reveal_type(MyModel.objects.queryset_method_3()) # N: Revealed type is "builtins.str"
reveal_type(MyModel.objects.queryset_method_4([])) # N: Revealed type is "None"
reveal_type(MyModel.objects.filter(id=1).queryset_method()) # N: Revealed type is "myapp.querysets.ModelQuerySet"
- reveal_type(MyModel.objects.filter(id=1)) # N: Revealed type is "myapp.querysets.ModelQuerySet[myapp.models.MyModel*]"
+ reveal_type(MyModel.objects.filter(id=1)) # N: Revealed type is "myapp.querysets.ModelQuerySet[myapp.models.MyModel]"
installed_apps:
- myapp
files:
@@ -75,7 +75,7 @@
main: |
from myapp.models import MyModel
reveal_type(MyModel().objects) # N: Revealed type is "myapp.models.NewManager[myapp.models.MyModel]"
- reveal_type(MyModel().objects.get()) # N: Revealed type is "myapp.models.MyModel*"
+ reveal_type(MyModel().objects.get()) # N: Revealed type is "myapp.models.MyModel"
reveal_type(MyModel().objects.queryset_method()) # N: Revealed type is "builtins.str"
installed_apps:
- myapp
@@ -149,7 +149,7 @@
main: |
from myapp.models import MyModel
reveal_type(MyModel().objects) # N: Revealed type is "myapp.models.NewManager[myapp.models.MyModel]"
- reveal_type(MyModel().objects.get()) # N: Revealed type is "myapp.models.MyModel*"
+ reveal_type(MyModel().objects.get()) # N: Revealed type is "myapp.models.MyModel"
reveal_type(MyModel().objects.queryset_method()) # N: Revealed type is "builtins.str"
installed_apps:
- myapp
@@ -173,7 +173,7 @@
main: |
from myapp.models import MyModel
reveal_type(MyModel().objects) # N: Revealed type is "myapp.managers.NewManager[myapp.models.MyModel]"
- reveal_type(MyModel().objects.get()) # N: Revealed type is "myapp.models.MyModel*"
+ reveal_type(MyModel().objects.get()) # N: Revealed type is "myapp.models.MyModel"
reveal_type(MyModel().objects.queryset_method) # N: Revealed type is "def (param: Union[builtins.str, None] =) -> Union[builtins.str, None]"
reveal_type(MyModel().objects.queryset_method('str')) # N: Revealed type is "Union[builtins.str, None]"
installed_apps:
@@ -203,7 +203,7 @@
main: |
from myapp.models import MyModel
reveal_type(MyModel().objects) # N: Revealed type is "myapp.managers.NewManager[myapp.models.MyModel]"
- reveal_type(MyModel().objects.get()) # N: Revealed type is "myapp.models.MyModel*"
+ reveal_type(MyModel().objects.get()) # N: Revealed type is "myapp.models.MyModel"
reveal_type(MyModel().objects.base_queryset_method) # N: Revealed type is "def (param: Union[builtins.int, builtins.str]) -> <nothing>"
reveal_type(MyModel().objects.base_queryset_method(2)) # N: Revealed type is "<nothing>"
installed_apps:
@@ -350,25 +350,25 @@
- case: from_queryset_includes_methods_returning_queryset
main: |
from myapp.models import MyModel
- reveal_type(MyModel.objects.none) # N: Revealed type is "def () -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.all) # N: Revealed type is "def () -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.filter) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.exclude) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.complex_filter) # N: Revealed type is "def (filter_obj: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.union) # N: Revealed type is "def (*other_qs: Any, *, all: builtins.bool =) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.intersection) # N: Revealed type is "def (*other_qs: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.difference) # N: Revealed type is "def (*other_qs: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.select_for_update) # N: Revealed type is "def (nowait: builtins.bool =, skip_locked: builtins.bool =, of: typing.Sequence[builtins.str] =, no_key: builtins.bool =) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.select_related) # N: Revealed type is "def (*fields: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.prefetch_related) # N: Revealed type is "def (*lookups: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.annotate) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.alias) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.order_by) # N: Revealed type is "def (*field_names: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.distinct) # N: Revealed type is "def (*field_names: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.reverse) # N: Revealed type is "def () -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.defer) # N: Revealed type is "def (*fields: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.only) # N: Revealed type is "def (*fields: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
- reveal_type(MyModel.objects.using) # N: Revealed type is "def (alias: Union[builtins.str, None]) -> myapp.models.MyQuerySet[myapp.models.MyModel*]"
+ reveal_type(MyModel.objects.none) # N: Revealed type is "def () -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.all) # N: Revealed type is "def () -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.filter) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.exclude) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.complex_filter) # N: Revealed type is "def (filter_obj: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.union) # N: Revealed type is "def (*other_qs: Any, *, all: builtins.bool =) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.intersection) # N: Revealed type is "def (*other_qs: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.difference) # N: Revealed type is "def (*other_qs: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.select_for_update) # N: Revealed type is "def (nowait: builtins.bool =, skip_locked: builtins.bool =, of: typing.Sequence[builtins.str] =, no_key: builtins.bool =) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.select_related) # N: Revealed type is "def (*fields: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.prefetch_related) # N: Revealed type is "def (*lookups: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.annotate) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.alias) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.order_by) # N: Revealed type is "def (*field_names: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.distinct) # N: Revealed type is "def (*field_names: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.reverse) # N: Revealed type is "def () -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.defer) # N: Revealed type is "def (*fields: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.only) # N: Revealed type is "def (*fields: Any) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
+ reveal_type(MyModel.objects.using) # N: Revealed type is "def (alias: Union[builtins.str, None]) -> myapp.models.MyQuerySet[myapp.models.MyModel]"
installed_apps:
- myapp
files:
diff --git a/tests/typecheck/managers/querysets/test_values_list.yml b/tests/typecheck/managers/querysets/test_values_list.yml
index e954ab964..0ac67638d 100644
--- a/tests/typecheck/managers/querysets/test_values_list.yml
+++ b/tests/typecheck/managers/querysets/test_values_list.yml
@@ -37,7 +37,7 @@
reveal_type(query.all().get()) # N: Revealed type is "Tuple[builtins.str]"
reveal_type(query.filter(age__gt=16).get()) # N: Revealed type is "Tuple[builtins.str]"
reveal_type(query.exclude(age__lte=16).get()) # N: Revealed type is "Tuple[builtins.str]"
- reveal_type(query.annotate(name_length=Length("name")).get()) # N: Revealed type is "builtins.tuple*[Any, ...]"
+ reveal_type(query.annotate(name_length=Length("name")).get()) # N: Revealed type is "builtins.tuple[Any, ...]"
installed_apps:
- myapp
files:
@@ -79,12 +79,12 @@
- case: values_list_flat_true_methods
main: |
from myapp.models import MyUser, MyUser2
- reveal_type(MyUser.objects.values_list('name', flat=True).get()) # N: Revealed type is "builtins.str*"
+ reveal_type(MyUser.objects.values_list('name', flat=True).get()) # N: Revealed type is "builtins.str"
reveal_type(MyUser.objects.values_list('name', 'age', flat=True).get())
# flat=True without specified fields returns primary key values
- reveal_type(MyUser.objects.values_list(flat=True)[0]) # N: Revealed type is "builtins.int*"
- reveal_type(MyUser2.objects.values_list(flat=True)[0]) # N: Revealed type is "builtins.str*"
+ reveal_type(MyUser.objects.values_list(flat=True)[0]) # N: Revealed type is "builtins.int"
+ reveal_type(MyUser2.objects.values_list(flat=True)[0]) # N: Revealed type is "builtins.str"
out: |
main:3: error: 'flat' is not valid when 'values_list' is called with more than one field
main:3: note: Revealed type is "Any"
@@ -217,7 +217,7 @@
reveal_type(Blog.objects.values_list('id', flat=True)) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.Blog, builtins.int]"
reveal_type(Blog.objects.values_list('publisher_id', flat=True)) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.Blog, builtins.int]"
# is Iterable[int]
- reveal_type(list(Blog.objects.values_list('id', flat=True))) # N: Revealed type is "builtins.list[builtins.int*]"
+ reveal_type(list(Blog.objects.values_list('id', flat=True))) # N: Revealed type is "builtins.list[builtins.int]"
installed_apps:
- myapp
files:
diff --git a/tests/typecheck/managers/test_managers.yml b/tests/typecheck/managers/test_managers.yml
index 9ee3eb388..2323b28ec 100644
--- a/tests/typecheck/managers/test_managers.yml
+++ b/tests/typecheck/managers/test_managers.yml
@@ -2,7 +2,7 @@
main: |
from myapp.models import User
reveal_type(User.objects) # N: Revealed type is "django.db.models.manager.Manager[myapp.models.User]"
- reveal_type(User.objects.get()) # N: Revealed type is "myapp.models.User*"
+ reveal_type(User.objects.get()) # N: Revealed type is "myapp.models.User"
installed_apps:
- myapp
files:
@@ -59,7 +59,7 @@
main: |
from myapp.models import Base, MyModel
base_instance = Base(MyModel)
- reveal_type(base_instance.model_cls._base_manager) # N: Revealed type is "django.db.models.manager.BaseManager[myapp.models.MyModel*]"
+ reveal_type(base_instance.model_cls._base_manager) # N: Revealed type is "django.db.models.manager.BaseManager[myapp.models.MyModel]"
installed_apps:
- myapp
files:
@@ -78,7 +78,7 @@
pass
class Child(Base[MyModel]):
def method(self) -> None:
- reveal_type(self.model_cls._base_manager) # N: Revealed type is "django.db.models.manager.BaseManager[myapp.models.MyModel*]"
+ reveal_type(self.model_cls._base_manager) # N: Revealed type is "django.db.models.manager.BaseManager[myapp.models.MyModel]"
- case: if_custom_manager_defined_it_is_set_to_default_manager
main: |
@@ -126,7 +126,7 @@
main: |
from myapp.models import MyUser
reveal_type(MyUser.objects) # N: Revealed type is "myapp.models.UserManager[myapp.models.MyUser]"
- reveal_type(MyUser.objects.get()) # N: Revealed type is "myapp.models.MyUser*"
+ reveal_type(MyUser.objects.get()) # N: Revealed type is "myapp.models.MyUser"
reveal_type(MyUser.objects.get_or_404()) # N: Revealed type is "myapp.models.MyUser"
installed_apps:
- myapp
@@ -222,10 +222,10 @@
main: |
from myapp.models import UnrelatedModel, MyModel
reveal_type(UnrelatedModel.objects) # N: Revealed type is "django.db.models.manager.Manager[myapp.models.UnrelatedModel]"
- reveal_type(UnrelatedModel.objects.first()) # N: Revealed type is "Union[myapp.models.UnrelatedModel*, None]"
+ reveal_type(UnrelatedModel.objects.first()) # N: Revealed type is "Union[myapp.models.UnrelatedModel, None]"
reveal_type(MyModel.objects) # N: Revealed type is "django.db.models.manager.Manager[myapp.models.MyModel]"
- reveal_type(MyModel.objects.first()) # N: Revealed type is "Union[myapp.models.MyModel*, None]"
+ reveal_type(MyModel.objects.first()) # N: Revealed type is "Union[myapp.models.MyModel, None]"
installed_apps:
- myapp
files:
@@ -243,10 +243,10 @@
main: |
from myapp.models import UnrelatedModel2, MyModel2
reveal_type(UnrelatedModel2.objects) # N: Revealed type is "django.db.models.manager.Manager[myapp.models.UnrelatedModel2]"
- reveal_type(UnrelatedModel2.objects.first()) # N: Revealed type is "Union[myapp.models.UnrelatedModel2*, None]"
+ reveal_type(UnrelatedModel2.objects.first()) # N: Revealed type is "Union[myapp.models.UnrelatedModel2, None]"
reveal_type(MyModel2.objects) # N: Revealed type is "django.db.models.manager.Manager[myapp.models.MyModel2]"
- reveal_type(MyModel2.objects.first()) # N: Revealed type is "Union[myapp.models.MyModel2*, None]"
+ reveal_type(MyModel2.objects.first()) # N: Revealed type is "Union[myapp.models.MyModel2, None]"
installed_apps:
- myapp
files:
@@ -264,10 +264,10 @@
main: |
from myapp.models import ParentOfMyModel3, MyModel3
reveal_type(ParentOfMyModel3.objects) # N: Revealed type is "django.db.models.manager.Manager[myapp.models.ParentOfMyModel3]"
- reveal_type(ParentOfMyModel3.objects.first()) # N: Revealed type is "Union[myapp.models.ParentOfMyModel3*, None]"
+ reveal_type(ParentOfMyModel3.objects.first()) # N: Revealed type is "Union[myapp.models.ParentOfMyModel3, None]"
reveal_type(MyModel3.objects) # N: Revealed type is "django.db.models.manager.Manager[myapp.models.MyModel3]"
- reveal_type(MyModel3.objects.first()) # N: Revealed type is "Union[myapp.models.MyModel3*, None]"
+ reveal_type(MyModel3.objects.first()) # N: Revealed type is "Union[myapp.models.MyModel3, None]"
installed_apps:
- myapp
files:
@@ -285,10 +285,10 @@
main: |
from myapp.models import ParentOfMyModel4, MyModel4
reveal_type(ParentOfMyModel4.objects) # N: Revealed type is "django.db.models.manager.Manager[myapp.models.ParentOfMyModel4]"
- reveal_type(ParentOfMyModel4.objects.first()) # N: Revealed type is "Union[myapp.models.ParentOfMyModel4*, None]"
+ reveal_type(ParentOfMyModel4.objects.first()) # N: Revealed type is "Union[myapp.models.ParentOfMyModel4, None]"
reveal_type(MyModel4.objects) # N: Revealed type is "django.db.models.manager.Manager[myapp.models.MyModel4]"
- reveal_type(MyModel4.objects.first()) # N: Revealed type is "Union[myapp.models.MyModel4*, None]"
+ reveal_type(MyModel4.objects.first()) # N: Revealed type is "Union[myapp.models.MyModel4, None]"
installed_apps:
- myapp
files:
@@ -333,15 +333,15 @@
main: |
from myapp.models import User
reveal_type(User.objects) # N: Revealed type is "myapp.models.User_MyManager2[myapp.models.User]"
- reveal_type(User.objects.select_related()) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.User*, myapp.models.User*]"
- reveal_type(User.objects.get()) # N: Revealed type is "myapp.models.User*"
+ reveal_type(User.objects.select_related()) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.User, myapp.models.User]"
+ reveal_type(User.objects.get()) # N: Revealed type is "myapp.models.User"
reveal_type(User.objects.get_instance()) # N: Revealed type is "builtins.int"
reveal_type(User.objects.get_instance_untyped('hello')) # N: Revealed type is "Any"
from myapp.models import ChildUser
reveal_type(ChildUser.objects) # N: Revealed type is "myapp.models.ChildUser_MyManager2[myapp.models.ChildUser]"
- reveal_type(ChildUser.objects.select_related()) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.ChildUser*, myapp.models.ChildUser*]"
- reveal_type(ChildUser.objects.get()) # N: Revealed type is "myapp.models.ChildUser*"
+ reveal_type(ChildUser.objects.select_related()) # N: Revealed type is "django.db.models.query._QuerySet[myapp.models.ChildUser, myapp.models.ChildUser]"
+ reveal_type(ChildUser.objects.get()) # N: Revealed type is "myapp.models.ChildUser"
reveal_type(ChildUser.objects.get_instance()) # N: Revealed type is "builtins.int"
reveal_type(ChildUser.objects.get_instance_untyped('hello')) # N: Revealed type is "Any"
installed_apps:
diff --git a/tests/typecheck/models/test_contrib_models.yml b/tests/typecheck/models/test_contrib_models.yml
index 94409e463..6a2838d34 100644
--- a/tests/typecheck/models/test_contrib_models.yml
+++ b/tests/typecheck/models/test_contrib_models.yml
@@ -1,14 +1,14 @@
- case: contrib_auth_model_fields
main: |
from django.contrib.auth.models import User
- reveal_type(User().username) # N: Revealed type is "builtins.str*"
- reveal_type(User().password) # N: Revealed type is "builtins.str*"
- reveal_type(User().first_name) # N: Revealed type is "builtins.str*"
- reveal_type(User().last_name) # N: Revealed type is "builtins.str*"
- reveal_type(User().email) # N: Revealed type is "builtins.str*"
+ reveal_type(User().username) # N: Revealed type is "builtins.str"
+ reveal_type(User().password) # N: Revealed type is "builtins.str"
+ reveal_type(User().first_name) # N: Revealed type is "builtins.str"
+ reveal_type(User().last_name) # N: Revealed type is "builtins.str"
+ reveal_type(User().email) # N: Revealed type is "builtins.str"
reveal_type(User().is_staff) # N: Revealed type is "builtins.bool"
reveal_type(User().is_active) # N: Revealed type is "builtins.bool"
- reveal_type(User().date_joined) # N: Revealed type is "datetime.datetime*"
+ reveal_type(User().date_joined) # N: Revealed type is "datetime.datetime"
reveal_type(User().last_login) # N: Revealed type is "Union[datetime.datetime, None]"
reveal_type(User().is_authenticated) # N: Revealed type is "Literal[True]"
reveal_type(User().is_anonymous) # N: Revealed type is "Literal[False]"
@@ -18,11 +18,11 @@
reveal_type(AnonymousUser().is_anonymous) # N: Revealed type is "Literal[True]"
from django.contrib.auth.models import Permission
- reveal_type(Permission().name) # N: Revealed type is "builtins.str*"
- reveal_type(Permission().codename) # N: Revealed type is "builtins.str*"
+ reveal_type(Permission().name) # N: Revealed type is "builtins.str"
+ reveal_type(Permission().codename) # N: Revealed type is "builtins.str"
from django.contrib.auth.models import PermissionsMixin
reveal_type(PermissionsMixin().is_superuser) # N: Revealed type is "builtins.bool"
from django.contrib.auth.models import Group
- reveal_type(Group().name) # N: Revealed type is "builtins.str*"
+ reveal_type(Group().name) # N: Revealed type is "builtins.str"
diff --git a/tests/typecheck/models/test_create.yml b/tests/typecheck/models/test_create.yml
index 3376adfde..2d6c1a2cc 100644
--- a/tests/typecheck/models/test_create.yml
+++ b/tests/typecheck/models/test_create.yml
@@ -19,9 +19,9 @@
main: |
from myapp.models import Child
c = Child.objects.create(name='Maxim', lastname='Maxim2')
- reveal_type(c.id) # N: Revealed type is "builtins.int*"
- reveal_type(c.name) # N: Revealed type is "builtins.str*"
- reveal_type(c.lastname) # N: Revealed type is "builtins.str*"
+ reveal_type(c.id) # N: Revealed type is "builtins.int"
+ reveal_type(c.name) # N: Revealed type is "builtins.str"
+ reveal_type(c.lastname) # N: Revealed type is "builtins.str"
installed_apps:
- myapp
files:
@@ -96,19 +96,19 @@
main: |
from myapp.models import MyModel
first = MyModel(id=None)
- reveal_type(first.id) # N: Revealed type is "builtins.int*"
+ reveal_type(first.id) # N: Revealed type is "builtins.int"
first = MyModel.objects.create(id=None)
- reveal_type(first.id) # N: Revealed type is "builtins.int*"
+ reveal_type(first.id) # N: Revealed type is "builtins.int"
first = MyModel()
first.id = None
- reveal_type(first.id) # N: Revealed type is "builtins.int*"
+ reveal_type(first.id) # N: Revealed type is "builtins.int"
from myapp.models import MyModel2
MyModel2(id=None) # E: Incompatible type for "id" of "MyModel2" (got "None", expected "Union[float, int, str, Combinable]")
MyModel2.objects.create(id=None) # E: Incompatible type for "id" of "MyModel2" (got "None", expected "Union[float, int, str, Combinable]")
second = MyModel2()
second.id = None # E: Incompatible types in assignment (expression has type "None", variable has type "Union[float, int, str, Combinable]")
- reveal_type(second.id) # N: Revealed type is "builtins.int*"
+ reveal_type(second.id) # N: Revealed type is "builtins.int"
# default set but no primary key doesn't allow None
from myapp.models import MyModel3
@@ -116,7 +116,7 @@
MyModel3.objects.create(default=None) # E: Incompatible type for "default" of "MyModel3" (got "None", expected "Union[float, int, str, Combinable]")
third = MyModel3()
third.default = None # E: Incompatible types in assignment (expression has type "None", variable has type "Union[float, int, str, Combinable]")
- reveal_type(third.default) # N: Revealed type is "builtins.int*"
+ reveal_type(third.default) # N: Revealed type is "builtins.int"
installed_apps:
- myapp
files:
diff --git a/tests/typecheck/models/test_inheritance.yml b/tests/typecheck/models/test_inheritance.yml
index bc04f5d1f..55e7e338c 100644
--- a/tests/typecheck/models/test_inheritance.yml
+++ b/tests/typecheck/models/test_inheritance.yml
@@ -69,7 +69,7 @@
- case: fields_recognized_if_base_model_is_subclass_of_models_model
main: |
from myapp.models import User
- reveal_type(User().username) # N: Revealed type is "builtins.str*"
+ reveal_type(User().username) # N: Revealed type is "builtins.str"
installed_apps:
- myapp
files:
@@ -90,8 +90,8 @@
- case: django_contrib_gis_base_model_mixin_inheritance
main: |
from myapp.models import User
- reveal_type(User().name) # N: Revealed type is "builtins.str*"
- reveal_type(User().updated_at) # N: Revealed type is "datetime.datetime*"
+ reveal_type(User().name) # N: Revealed type is "builtins.str"
+ reveal_type(User().updated_at) # N: Revealed type is "datetime.datetime"
installed_apps:
- myapp
files:
diff --git a/tests/typecheck/models/test_init.yml b/tests/typecheck/models/test_init.yml
index 2fcf84ccc..ebc100671 100644
--- a/tests/typecheck/models/test_init.yml
+++ b/tests/typecheck/models/test_init.yml
@@ -283,10 +283,10 @@
unset_set_type=[],
)
out: |
- main:4: note: Revealed type is "builtins.int*"
- main:5: note: Revealed type is "builtins.int*"
- main:6: note: Revealed type is "builtins.list*[builtins.int]"
- main:7: note: Revealed type is "builtins.int*"
+ main:4: note: Revealed type is "builtins.int"
+ main:5: note: Revealed type is "builtins.int"
+ main:6: note: Revealed type is "builtins.list[builtins.int]"
+ main:7: note: Revealed type is "builtins.int"
main:8: note: Revealed type is "Any"
main:9: error: Incompatible types in assignment (expression has type "str", variable has type "int")
main:10: error: Incompatible types in assignment (expression has type "str", variable has type "Union[int, float]")
diff --git a/tests/typecheck/models/test_primary_key.yml b/tests/typecheck/models/test_primary_key.yml
index 1c989d6dd..a715b9476 100644
--- a/tests/typecheck/models/test_primary_key.yml
+++ b/tests/typecheck/models/test_primary_key.yml
@@ -2,8 +2,8 @@
main: |
from myapp.models import MyModel
x = MyModel.objects.get(id=1)
- reveal_type(x.id) # N: Revealed type is "builtins.int*"
- reveal_type(x.pk) # N: Revealed type is "builtins.int*"
+ reveal_type(x.id) # N: Revealed type is "builtins.int"
+ reveal_type(x.pk) # N: Revealed type is "builtins.int"
MyModel.objects.get(pk=1)
installed_apps:
@@ -15,16 +15,16 @@
from django.db import models
class MyModel(models.Model):
def __str__(self):
- reveal_type(self.id) # N: Revealed type is "builtins.int*"
- reveal_type(self.pk) # N: Revealed type is "builtins.int*"
+ reveal_type(self.id) # N: Revealed type is "builtins.int"
+ reveal_type(self.pk) # N: Revealed type is "builtins.int"
- case: test_access_to_id_field_through_self_if_primary_key_is_defined
main: |
from myapp.models import MyModel
x = MyModel.objects.get(id='a')
- reveal_type(x.id) # N: Revealed type is "builtins.str*"
- reveal_type(x.pk) # N: Revealed type is "builtins.str*"
+ reveal_type(x.id) # N: Revealed type is "builtins.str"
+ reveal_type(x.pk) # N: Revealed type is "builtins.str"
MyModel.objects.get(pk='a')
installed_apps:
@@ -37,16 +37,16 @@
class MyModel(models.Model):
id = models.CharField(max_length=10, primary_key=True)
def __str__(self):
- reveal_type(self.id) # N: Revealed type is "builtins.str*"
- reveal_type(self.pk) # N: Revealed type is "builtins.str*"
+ reveal_type(self.id) # N: Revealed type is "builtins.str"
+ reveal_type(self.pk) # N: Revealed type is "builtins.str"
- case: test_access_to_id_field_through_self_if_primary_key_has_different_name
main: |
from myapp.models import MyModel
x = MyModel.objects.get(primary='a')
- reveal_type(x.primary) # N: Revealed type is "builtins.str*"
- reveal_type(x.pk) # N: Revealed type is "builtins.str*"
+ reveal_type(x.primary) # N: Revealed type is "builtins.str"
+ reveal_type(x.pk) # N: Revealed type is "builtins.str"
x.id # E: "MyModel" has no attribute "id"
MyModel.objects.get(pk='a')
@@ -61,6 +61,6 @@
class MyModel(models.Model):
primary = models.CharField(max_length=10, primary_key=True)
def __str__(self):
- reveal_type(self.primary) # N: Revealed type is "builtins.str*"
- reveal_type(self.pk) # N: Revealed type is "builtins.str*"
+ reveal_type(self.primary) # N: Revealed type is "builtins.str"
+ reveal_type(self.pk) # N: Revealed type is "builtins.str"
self.id # E: "MyModel" has no attribute "id"
diff --git a/tests/typecheck/models/test_proxy_models.yml b/tests/typecheck/models/test_proxy_models.yml
index 7648756c6..5e2eaa4db 100644
--- a/tests/typecheck/models/test_proxy_models.yml
+++ b/tests/typecheck/models/test_proxy_models.yml
@@ -4,7 +4,7 @@
Blog(publisher=Publisher())
Blog.objects.create(publisher=Publisher())
Blog().publisher = Publisher()
- reveal_type(Blog().publisher) # N: Revealed type is "myapp.models.PublisherProxy*"
+ reveal_type(Blog().publisher) # N: Revealed type is "myapp.models.PublisherProxy"
installed_apps:
- myapp
files:
diff --git a/tests/typecheck/test/test_client.yml b/tests/typecheck/test/test_client.yml
index 212946531..e7ce034c7 100644
--- a/tests/typecheck/test/test_client.yml
+++ b/tests/typecheck/test/test_client.yml
@@ -26,8 +26,8 @@
from django.test.client import RequestFactory, AsyncRequestFactory
factory = RequestFactory()
request = factory.get('foo')
- reveal_type(request) # N: Revealed type is "django.core.handlers.wsgi.WSGIRequest*"
+ reveal_type(request) # N: Revealed type is "django.core.handlers.wsgi.WSGIRequest"
async_factory = AsyncRequestFactory()
async_request = async_factory.get('foo')
- reveal_type(async_request) # N: Revealed type is "django.core.handlers.asgi.ASGIRequest*"
+ reveal_type(async_request) # N: Revealed type is "django.core.handlers.asgi.ASGIRequest"
diff --git a/tests/typecheck/test_config.yml b/tests/typecheck/test_config.yml
index 295dc2069..91569061a 100644
--- a/tests/typecheck/test_config.yml
+++ b/tests/typecheck/test_config.yml
@@ -2,8 +2,8 @@
main: |
from myapp.models import MyModel
mymodel = MyModel(user_id=1)
- reveal_type(mymodel.id) # N: Revealed type is "builtins.int*"
- reveal_type(mymodel.user) # N: Revealed type is "django.contrib.auth.models.User*"
+ reveal_type(mymodel.id) # N: Revealed type is "builtins.int"
+ reveal_type(mymodel.user) # N: Revealed type is "django.contrib.auth.models.User"
reveal_type(mymodel.objects) # N: Revealed type is "django.db.models.manager.Manager[myapp.models.MyModel]"
mypy_config: |
[mypy.plugins.django-stubs]
@@ -20,13 +20,13 @@
class MyModel(models.Model):
user = models.ForeignKey('auth.User', on_delete=models.CASCADE)
if TYPE_CHECKING:
- reveal_type(MyModel().user) # N: Revealed type is "django.contrib.auth.models.User*"
+ reveal_type(MyModel().user) # N: Revealed type is "django.contrib.auth.models.User"
- case: generate_pyproject_toml_and_settings_file_from_installed_apps_key
main: |
from myapp.models import MyModel
mymodel = MyModel(user_id=1)
- reveal_type(mymodel.id) # N: Revealed type is "builtins.int*"
+ reveal_type(mymodel.id) # N: Revealed type is "builtins.int"
installed_apps:
- django.contrib.auth
- myapp
diff --git a/tests/typecheck/test_formsets.yml b/tests/typecheck/test_formsets.yml
index ed773142b..073798a04 100644
--- a/tests/typecheck/test_formsets.yml
+++ b/tests/typecheck/test_formsets.yml
@@ -6,7 +6,7 @@
ArticleFS: Type[forms.BaseInlineFormSet[Article, Category, Any]] = forms.inlineformset_factory(Category, Article)
ArticleFS(instance=Article()) # E: Argument "instance" to "BaseInlineFormSet" has incompatible type "Article"; expected "Optional[Category]"
fs = ArticleFS(instance=Category())
- reveal_type(fs.instance) # N: Revealed type is "myapp.models.Category*"
+ reveal_type(fs.instance) # N: Revealed type is "myapp.models.Category"
installed_apps:
- myapp
files:
diff --git a/tests/typecheck/test_settings.yml b/tests/typecheck/test_settings.yml
index 4cdb26325..70874fa11 100644
--- a/tests/typecheck/test_settings.yml
+++ b/tests/typecheck/test_settings.yml
@@ -5,8 +5,8 @@
# standard settings
reveal_type(settings.AUTH_USER_MODEL) # N: Revealed type is "builtins.str"
reveal_type(settings.ROOT_DIR) # N: Revealed type is "builtins.str"
- reveal_type(settings.APPS_DIR) # N: Revealed type is "pathlib.Path*"
- reveal_type(settings.NUMBERS) # N: Revealed type is "builtins.list[builtins.str*]"
+ reveal_type(settings.APPS_DIR) # N: Revealed type is "pathlib.Path"
+ reveal_type(settings.NUMBERS) # N: Revealed type is "builtins.list[builtins.str]"
reveal_type(settings.DICT) # N: Revealed type is "builtins.dict[Any, Any]"
custom_settings: |
from base import *
@@ -37,8 +37,8 @@
from settings.basic_settings import *
main: |
from django.conf import settings
- reveal_type(settings.MEDIA_ROOT) # N: Revealed type is "pathlib.Path*"
- reveal_type(settings.MEDIA_ROOT / 'part') # N: Revealed type is "pathlib.Path*"
+ reveal_type(settings.MEDIA_ROOT) # N: Revealed type is "pathlib.Path"
+ reveal_type(settings.MEDIA_ROOT / 'part') # N: Revealed type is "pathlib.Path"
files:
- path: settings/__init__.py
- path: settings/basic_settings.py
diff --git a/tests/typecheck/test_shortcuts.yml b/tests/typecheck/test_shortcuts.yml
index 6659878fd..976656b04 100644
--- a/tests/typecheck/test_shortcuts.yml
+++ b/tests/typecheck/test_shortcuts.yml
@@ -3,13 +3,13 @@
from django.shortcuts import get_object_or_404, get_list_or_404
from myapp.models import MyModel
- reveal_type(get_object_or_404(MyModel)) # N: Revealed type is "myapp.models.MyModel*"
- reveal_type(get_object_or_404(MyModel.objects)) # N: Revealed type is "myapp.models.MyModel*"
- reveal_type(get_object_or_404(MyModel.objects.get_queryset())) # N: Revealed type is "myapp.models.MyModel*"
+ reveal_type(get_object_or_404(MyModel)) # N: Revealed type is "myapp.models.MyModel"
+ reveal_type(get_object_or_404(MyModel.objects)) # N: Revealed type is "myapp.models.MyModel"
+ reveal_type(get_object_or_404(MyModel.objects.get_queryset())) # N: Revealed type is "myapp.models.MyModel"
- reveal_type(get_list_or_404(MyModel)) # N: Revealed type is "builtins.list[myapp.models.MyModel*]"
- reveal_type(get_list_or_404(MyModel.objects)) # N: Revealed type is "builtins.list[myapp.models.MyModel*]"
- reveal_type(get_list_or_404(MyModel.objects.get_queryset())) # N: Revealed type is "builtins.list[myapp.models.MyModel*]"
+ reveal_type(get_list_or_404(MyModel)) # N: Revealed type is "builtins.list[myapp.models.MyModel]"
+ reveal_type(get_list_or_404(MyModel.objects)) # N: Revealed type is "builtins.list[myapp.models.MyModel]"
+ reveal_type(get_list_or_404(MyModel.objects.get_queryset())) # N: Revealed type is "builtins.list[myapp.models.MyModel]"
installed_apps:
- myapp
files:
diff --git a/tests/typecheck/utils/test_datastructures.yml b/tests/typecheck/utils/test_datastructures.yml
index b152545a3..13a48d8d3 100644
--- a/tests/typecheck/utils/test_datastructures.yml
+++ b/tests/typecheck/utils/test_datastructures.yml
@@ -10,39 +10,39 @@
d3: Tuple[Tuple[str, List[Union[str, int]]], ...] = (('foo', ['Foo']), ('bar', [2, 3]))
var3 = MultiValueDict(d3)
reveal_type(var1) # N: Revealed type is "django.utils.datastructures.MultiValueDict[Any, Any]"
- reveal_type(var2) # N: Revealed type is "django.utils.datastructures.MultiValueDict[builtins.str*, Union[builtins.str, builtins.int]]"
- reveal_type(var3) # N: Revealed type is "django.utils.datastructures.MultiValueDict[builtins.str*, Union[builtins.str, builtins.int]]"
+ reveal_type(var2) # N: Revealed type is "django.utils.datastructures.MultiValueDict[builtins.str, Union[builtins.str, builtins.int]]"
+ reveal_type(var3) # N: Revealed type is "django.utils.datastructures.MultiValueDict[builtins.str, Union[builtins.str, builtins.int]]"
# __getitem__, get, getlist (with proofs)
d = MultiValueDict({'foo': ['Foo']})
d.setlist('bar', [])
# actually 'Foo'
- reveal_type(d['foo']) # N: Revealed type is "Union[builtins.str*, builtins.list[builtins.object]]"
+ reveal_type(d['foo']) # N: Revealed type is "Union[builtins.str, builtins.list[builtins.object]]"
# actually []
- reveal_type(d['bar']) # N: Revealed type is "Union[builtins.str*, builtins.list[builtins.object]]"
+ reveal_type(d['bar']) # N: Revealed type is "Union[builtins.str, builtins.list[builtins.object]]"
# actually None
- reveal_type(d.get('bar')) # N: Revealed type is "Union[builtins.str*, None]"
+ reveal_type(d.get('bar')) # N: Revealed type is "Union[builtins.str, None]"
# actually 1
- reveal_type(d.get('bar', 1)) # N: Revealed type is "Union[builtins.str, builtins.int*]"
+ reveal_type(d.get('bar', 1)) # N: Revealed type is "Union[builtins.str, builtins.int]"
# actually []
reveal_type(d.getlist('bar')) # N: Revealed type is "builtins.list[builtins.str]"
# actually []
- reveal_type(d.getlist('bar', [1])) # N: Revealed type is "Union[builtins.list[builtins.str], builtins.list*[builtins.int*]]"
+ reveal_type(d.getlist('bar', [1])) # N: Revealed type is "Union[builtins.list[builtins.str], builtins.list[builtins.int]]"
# actually True (note that default can be not a list)
- reveal_type(d.getlist('baz', True)) # N: Revealed type is "Union[builtins.list[builtins.str], builtins.bool*]"
+ reveal_type(d.getlist('baz', True)) # N: Revealed type is "Union[builtins.list[builtins.str], builtins.bool]"
# setters
- reveal_type(d.setlistdefault('baz')) # N: Revealed type is "builtins.list[builtins.str*]"
+ reveal_type(d.setlistdefault('baz')) # N: Revealed type is "builtins.list[builtins.str]"
d.setlistdefault('baz', [1]) # E: List item 0 has incompatible type "int"; expected "str"
- reveal_type(d.setlistdefault('baz', [])) # N: Revealed type is "builtins.list[builtins.str*]"
+ reveal_type(d.setlistdefault('baz', [])) # N: Revealed type is "builtins.list[builtins.str]"
d.appendlist('baz', 'Baz')
d.appendlist('baz', 1) # E: Argument 2 to "appendlist" of "MultiValueDict" has incompatible type "int"; expected "str"
# iterators
# actually [('foo', 'Foo'), ('bar', [])]
- reveal_type(list(d.items())) # N: Revealed type is "builtins.list[Tuple[builtins.str*, Union[builtins.str*, builtins.list[builtins.object]]]]"
- reveal_type(list(d.keys())) # N: Revealed type is "builtins.list[builtins.str*]"
+ reveal_type(list(d.items())) # N: Revealed type is "builtins.list[Tuple[builtins.str, Union[builtins.str, builtins.list[builtins.object]]]]"
+ reveal_type(list(d.keys())) # N: Revealed type is "builtins.list[builtins.str]"
# actually ['Foo', []]
- reveal_type(list(d.values())) # N: Revealed type is "builtins.list[Union[builtins.str*, builtins.list[builtins.object]]]"
+ reveal_type(list(d.values())) # N: Revealed type is "builtins.list[Union[builtins.str, builtins.list[builtins.object]]]"
# actually {'foo': 'Foo', 'bar': []}
- reveal_type(d.dict()) # N: Revealed type is "builtins.dict[builtins.str*, Union[builtins.str*, builtins.list[builtins.object]]]"
+ reveal_type(d.dict()) # N: Revealed type is "builtins.dict[builtins.str, Union[builtins.str, builtins.list[builtins.object]]]"
diff --git a/tests/typecheck/utils/test_encoding.yml b/tests/typecheck/utils/test_encoding.yml
index 102d319ae..3816895f9 100644
--- a/tests/typecheck/utils/test_encoding.yml
+++ b/tests/typecheck/utils/test_encoding.yml
@@ -5,10 +5,10 @@
pass
reveal_type(force_bytes(123)) # N: Revealed type is "builtins.bytes"
- reveal_type(force_bytes(123, strings_only=True)) # N: Revealed type is "builtins.int*"
+ reveal_type(force_bytes(123, strings_only=True)) # N: Revealed type is "builtins.int"
reveal_type(force_str(123)) # N: Revealed type is "builtins.str"
- reveal_type(force_str(123, strings_only=True)) # N: Revealed type is "builtins.int*"
- reveal_type(force_str('foo')) # N: Revealed type is "builtins.str*"
- reveal_type(force_str('foo', strings_only=True)) # N: Revealed type is "builtins.str*"
- reveal_type(force_str(S('foo'), strings_only=True)) # N: Revealed type is "main.S*"
+ reveal_type(force_str(123, strings_only=True)) # N: Revealed type is "builtins.int"
+ reveal_type(force_str('foo')) # N: Revealed type is "builtins.str"
+ reveal_type(force_str('foo', strings_only=True)) # N: Revealed type is "builtins.str"
+ reveal_type(force_str(S('foo'), strings_only=True)) # N: Revealed type is "main.S"
diff --git a/tests/typecheck/utils/test_functional.yml b/tests/typecheck/utils/test_functional.yml
index e0c18fff6..eb4417bfa 100644
--- a/tests/typecheck/utils/test_functional.yml
+++ b/tests/typecheck/utils/test_functional.yml
@@ -7,12 +7,12 @@
@cached_property
def attr(self) -> List[str]: ...
- reveal_type(attr) # N: Revealed type is "django.utils.functional.cached_property[builtins.list*[builtins.str]]"
+ reveal_type(attr) # N: Revealed type is "django.utils.functional.cached_property[builtins.list[builtins.str]]"
reveal_type(attr.name) # N: Revealed type is "builtins.str"
- reveal_type(Foo.attr) # N: Revealed type is "django.utils.functional.cached_property[builtins.list*[builtins.str]]"
- reveal_type(Foo.attr.func) # N: Revealed type is "def (*Any, **Any) -> builtins.list*[builtins.str]"
+ reveal_type(Foo.attr) # N: Revealed type is "django.utils.functional.cached_property[builtins.list[builtins.str]]"
+ reveal_type(Foo.attr.func) # N: Revealed type is "def (*Any, **Any) -> builtins.list[builtins.str]"
f = Foo()
- reveal_type(f.attr) # N: Revealed type is "builtins.list*[builtins.str]"
+ reveal_type(f.attr) # N: Revealed type is "builtins.list[builtins.str]"
f.attr.name # E: "List[str]" has no attribute "name"
diff --git a/tests/typecheck/views/generic/test_edit.yml b/tests/typecheck/views/generic/test_edit.yml
index 569a7e16d..895e5795b 100644
--- a/tests/typecheck/views/generic/test_edit.yml
+++ b/tests/typecheck/views/generic/test_edit.yml
@@ -46,11 +46,11 @@
class MyCreateView(CreateView[Article, ArticleModelForm]):
def some(self) -> None:
- reveal_type(self.get_form_class()) # N: Revealed type is "Type[main.ArticleModelForm*]"
+ reveal_type(self.get_form_class()) # N: Revealed type is "Type[main.ArticleModelForm]"
class MyUpdateView(UpdateView[Article, ArticleModelForm]):
def some(self) -> None:
- reveal_type(self.get_form_class()) # N: Revealed type is "Type[main.ArticleModelForm*]"
+ reveal_type(self.get_form_class()) # N: Revealed type is "Type[main.ArticleModelForm]"
installed_apps:
- myapp
files:
@@ -79,7 +79,7 @@
class MyCreateView(CreateView[Article, ArticleModelForm]):
def some(self) -> None:
- reveal_type(self.get_form()) # N: Revealed type is "main.ArticleModelForm*"
+ reveal_type(self.get_form()) # N: Revealed type is "main.ArticleModelForm"
reveal_type(self.get_form(SubArticleModelForm)) # N: Revealed type is "main.SubArticleModelForm"
reveal_type(self.get_form(AnotherArticleModelForm)) # N: Revealed type is "main.AnotherArticleModelForm" # E: Argument 1 to "get_form" of "FormMixin" has incompatible type "Type[AnotherArticleModelForm]"; expected "Optional[Type[ArticleModelForm]]"
installed_apps:
diff --git a/tests/typecheck/views/test_function_based_views.yml b/tests/typecheck/views/test_function_based_views.yml
index 21ec80018..97f99f203 100644
--- a/tests/typecheck/views/test_function_based_views.yml
+++ b/tests/typecheck/views/test_function_based_views.yml
@@ -24,25 +24,25 @@
def empty_response(request: HttpRequest) -> HttpResponse:
response = HttpResponse()
- reveal_type(response.content) # N: Revealed type is "builtins.bytes*"
+ reveal_type(response.content) # N: Revealed type is "builtins.bytes"
return response
def str_response(request: HttpRequest) -> HttpResponse:
response = HttpResponse()
response.content = 'It works!'
- reveal_type(response.content) # N: Revealed type is "builtins.bytes*"
+ reveal_type(response.content) # N: Revealed type is "builtins.bytes"
return response
def bytes_response(request: HttpRequest) -> HttpResponse:
response = HttpResponse()
response.content = b'It works!'
- reveal_type(response.content) # N: Revealed type is "builtins.bytes*"
+ reveal_type(response.content) # N: Revealed type is "builtins.bytes"
return response
def object_response(request: HttpRequest) -> HttpResponse:
response = HttpResponse()
response.content = _('It works!')
- reveal_type(response.content) # N: Revealed type is "builtins.bytes*"
+ reveal_type(response.content) # N: Revealed type is "builtins.bytes"
return response
- case: streaming_http_response
@@ -74,29 +74,29 @@
def empty_response(request: HttpRequest) -> StreamingHttpResponse:
response = StreamingHttpResponse()
- reveal_type(response.streaming_content) # N: Revealed type is "typing.Iterator*[builtins.bytes]"
+ reveal_type(response.streaming_content) # N: Revealed type is "typing.Iterator[builtins.bytes]"
return response
def str_response(request: HttpRequest) -> StreamingHttpResponse:
response = StreamingHttpResponse()
response.streaming_content = ['It works!']
- reveal_type(response.streaming_content) # N: Revealed type is "typing.Iterator*[builtins.bytes]"
+ reveal_type(response.streaming_content) # N: Revealed type is "typing.Iterator[builtins.bytes]"
return response
def bytes_response(request: HttpRequest) -> StreamingHttpResponse:
response = StreamingHttpResponse()
response.streaming_content = [b'It works!']
- reveal_type(response.streaming_content) # N: Revealed type is "typing.Iterator*[builtins.bytes]"
+ reveal_type(response.streaming_content) # N: Revealed type is "typing.Iterator[builtins.bytes]"
return response
def object_response(request: HttpRequest) -> StreamingHttpResponse:
response = StreamingHttpResponse()
response.streaming_content = [_('It works!')]
- reveal_type(response.streaming_content) # N: Revealed type is "typing.Iterator*[builtins.bytes]"
+ reveal_type(response.streaming_content) # N: Revealed type is "typing.Iterator[builtins.bytes]"
return response
def mixed_response(request: HttpRequest) -> StreamingHttpResponse:
response = StreamingHttpResponse()
response.streaming_content = [_('Yes'), '/', _('No')]
- reveal_type(response.streaming_content) # N: Revealed type is "typing.Iterator*[builtins.bytes]"
+ reveal_type(response.streaming_content) # N: Revealed type is "typing.Iterator[builtins.bytes]"
return response
|
scikit-hep__pyhf-1041 | Class returned by pyhf.Workspace.combine
# Question
Not a bug so I'm opening this as a question: The `pyhf.Workspace.combine` classmethod returns a `Workspace` explicitly instead of `cls`.
https://github.com/scikit-hep/pyhf/blob/e260626689f46414be185d834499cc65dce5a4b0/src/pyhf/workspace.py#L678
To work better with classes that want to inherit from `pyhf.Workspace`, I think it would be better to return the class as
```python
return cls(newspec)
```
# Relevant Issues and Pull Requests
none I'm aware of
| [
{
"content": "\"\"\"\npyhf workspaces hold the three data items:\n\n* the statistical model p(data|parameters)\n* the observed data (optional)\n* fit configurations (\"measurements\")\n\"\"\"\nimport logging\nimport jsonpatch\nimport copy\nimport collections\nfrom . import exceptions\nfrom . import utils\nfrom ... | [
{
"content": "\"\"\"\npyhf workspaces hold the three data items:\n\n* the statistical model p(data|parameters)\n* the observed data (optional)\n* fit configurations (\"measurements\")\n\"\"\"\nimport logging\nimport jsonpatch\nimport copy\nimport collections\nfrom . import exceptions\nfrom . import utils\nfrom ... | diff --git a/src/pyhf/workspace.py b/src/pyhf/workspace.py
index d2515bb3bb..6534bed5b0 100644
--- a/src/pyhf/workspace.py
+++ b/src/pyhf/workspace.py
@@ -675,4 +675,4 @@ def combine(cls, left, right, join='none'):
'observations': new_observations,
'version': new_version,
}
- return Workspace(newspec)
+ return cls(newspec)
diff --git a/tests/test_workspace.py b/tests/test_workspace.py
index b67c2306a7..91d908b21a 100644
--- a/tests/test_workspace.py
+++ b/tests/test_workspace.py
@@ -717,3 +717,32 @@ def test_workspace_equality(workspace_factory):
assert ws == ws
assert ws == ws_other
assert ws != 'not a workspace'
+
+
+def test_workspace_inheritance(workspace_factory):
+ ws = workspace_factory()
+ new_ws = ws.rename(
+ channels={'channel1': 'channel3', 'channel2': 'channel4'},
+ samples={
+ 'background1': 'background3',
+ 'background2': 'background4',
+ 'signal': 'signal2',
+ },
+ modifiers={
+ 'syst1': 'syst4',
+ 'bkg1Shape': 'bkg3Shape',
+ 'bkg2Shape': 'bkg4Shape',
+ },
+ measurements={
+ 'GaussExample': 'OtherGaussExample',
+ 'GammaExample': 'OtherGammaExample',
+ 'ConstExample': 'OtherConstExample',
+ 'LogNormExample': 'OtherLogNormExample',
+ },
+ )
+
+ class FooWorkspace(pyhf.Workspace):
+ pass
+
+ combined = FooWorkspace.combine(ws, new_ws)
+ assert isinstance(combined, FooWorkspace)
|
mkdocs__mkdocs-836 | Error when using a unicode filename on Windows
I am not totally sure this is the right place, but you will tell me :)
In a mkdocs-material-project I wanted to use diacritics in the filename, like this:
```
/Kapitel
1. Einstieg
2. Übersicht
3. Etcetera
```
And such an **"Ü"** seems to break everything:
```
C:\Python27\lib\urllib.py:1303: UnicodeWarning: Unicode equal comparison failed to convert both arguments to Unicode - interpreting them as being unequal
return ''.join(map(quoter, s))
ERROR - Error building page Allgemeines\1. Richtlinien.md
Traceback (most recent call last):
File "C:\Python27\lib\runpy.py", line 162, in _run_module_as_main
"__main__", fname, loader, pkg_name)
File "C:\Python27\lib\runpy.py", line 72, in _run_code
exec code in run_globals
File "C:\Python27\Scripts\mkdocs.exe\__main__.py", line 9, in <module>
File "C:\Python27\lib\site-packages\click\core.py", line 716, in __call__
return self.main(*args, **kwargs)
File "C:\Python27\lib\site-packages\click\core.py", line 696, in main
rv = self.invoke(ctx)
File "C:\Python27\lib\site-packages\click\core.py", line 1060, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "C:\Python27\lib\site-packages\click\core.py", line 889, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "C:\Python27\lib\site-packages\click\core.py", line 534, in invoke
return callback(*args, **kwargs)
File "C:\Python27\lib\site-packages\mkdocs\__main__.py", line 115, in serve_command
livereload=livereload,
File "C:\Python27\lib\site-packages\mkdocs\commands\serve.py", line 78, in serve
config = builder()
File "C:\Python27\lib\site-packages\mkdocs\commands\serve.py", line 74, in builder
build(config, live_server=True, clean_site_dir=True)
File "C:\Python27\lib\site-packages\mkdocs\commands\build.py", line 289, in build
build_pages(config)
File "C:\Python27\lib\site-packages\mkdocs\commands\build.py", line 249, in build_pages
dump_json)
File "C:\Python27\lib\site-packages\mkdocs\commands\build.py", line 184, in _build_page
output_content = template.render(context)
File "C:\Python27\lib\site-packages\jinja2\environment.py", line 989, in render
return self.environment.handle_exception(exc_info, True)
File "C:\Python27\lib\site-packages\jinja2\environment.py", line 754, in handle_exception
reraise(exc_type, exc_value, tb)
File "C:\Python27\lib\site-packages\material\base.html", line 102, in top-level template code
{% include "drawer.html" %}
File "C:\Python27\lib\site-packages\material\drawer.html", line 41, in top-level template code
{% include "nav.html" %}
File "C:\Python27\lib\site-packages\material\nav.html", line 6, in top-level template code
{% include 'nav.html' %}
File "C:\Python27\lib\site-packages\material\nav.html", line 12, in top-level template code
<a class="{% if nav_item.active %}current{% endif %}" title="{{ nav_item.title }}" href="{{ nav_item.url }}">
File "C:\Python27\lib\site-packages\jinja2\environment.py", line 408, in getattr
return getattr(obj, attribute)
File "C:\Python27\lib\site-packages\mkdocs\nav.py", line 153, in url
return self.url_context.make_relative(self.abs_url)
File "C:\Python27\lib\site-packages\mkdocs\nav.py", line 105, in make_relative
return utils.path_to_url(relative_path)
File "C:\Python27\lib\site-packages\mkdocs\utils\__init__.py", line 324, in path_to_url
return pathname2url(path)
File "C:\Python27\lib\nturl2path.py", line 54, in pathname2url
return urllib.quote('/'.join(components))
File "C:\Python27\lib\urllib.py", line 1303, in quote
return ''.join(map(quoter, s))
KeyError: u'\xdc'
```
Is this already known?
| [
{
"content": "# coding: utf-8\n\n\"\"\"\nStandalone file utils.\n\nNothing in this module should have an knowledge of config or the layout\nand structure of the site and pages in the site.\n\"\"\"\n\nfrom __future__ import unicode_literals\n\nimport logging\nimport markdown\nimport os\nimport pkg_resources\nimp... | [
{
"content": "# coding: utf-8\n\n\"\"\"\nStandalone file utils.\n\nNothing in this module should have an knowledge of config or the layout\nand structure of the site and pages in the site.\n\"\"\"\n\nfrom __future__ import unicode_literals\n\nimport logging\nimport markdown\nimport os\nimport pkg_resources\nimp... | diff --git a/docs/about/release-notes.md b/docs/about/release-notes.md
index fae106a539..28681bfc87 100644
--- a/docs/about/release-notes.md
+++ b/docs/about/release-notes.md
@@ -15,8 +15,9 @@ You can determine your currently installed version using `mkdocs --version`:
## Version 0.16 (2016-02-??)
-* Add a flag (-e/--theme-dir) to specifiy theme directory with the commands
+* Add a flag (-e/--theme-dir) to specifiy theme directory with the commands
`mkdocs build` and `mkdocs serve` (#832)
+* Fixed issues with Unicode filenames under Windows and Python 2. (#833)
## Version 0.15.3 (2016-02-18)
diff --git a/mkdocs/tests/integration.py b/mkdocs/tests/integration.py
index 5c0b10eb00..dcd8633bbb 100644
--- a/mkdocs/tests/integration.py
+++ b/mkdocs/tests/integration.py
@@ -35,17 +35,20 @@
required=True)
def main(output=None):
+ print("Building themes.")
for theme in sorted(MKDOCS_THEMES):
+ print("Building theme: {0}".format(theme))
project_dir = os.path.dirname(MKDOCS_CONFIG)
out = os.path.join(output, theme)
command = ['mkdocs', 'build', '-v', '--site-dir', out, '--theme', theme]
subprocess.check_call(command, cwd=project_dir)
+ print("Building test projects.")
for project in os.listdir(TEST_PROJECTS):
-
+ print("Building test project: {0}".format(project))
project_dir = os.path.join(TEST_PROJECTS, project)
out = os.path.join(output, project)
- command = ['mkdocs', 'build', '--site-dir', out]
+ command = ['mkdocs', 'build', '-v', '--site-dir', out]
subprocess.check_call(command, cwd=project_dir)
print("Theme and integration builds are available in {0}".format(output))
diff --git a/mkdocs/tests/integration/unicode/docs/index.md b/mkdocs/tests/integration/unicode/docs/index.md
new file mode 100644
index 0000000000..15bfba3adf
--- /dev/null
+++ b/mkdocs/tests/integration/unicode/docs/index.md
@@ -0,0 +1,2 @@
+# Unicode Test Documentation 📖
+
diff --git "a/mkdocs/tests/integration/unicode/docs/\303\234bersicht.md" "b/mkdocs/tests/integration/unicode/docs/\303\234bersicht.md"
new file mode 100644
index 0000000000..da37213adb
--- /dev/null
+++ "b/mkdocs/tests/integration/unicode/docs/\303\234bersicht.md"
@@ -0,0 +1,17 @@
+# Welcome to MkDocs
+
+For full documentation visit [mkdocs.org](http://mkdocs.org).
+
+## Commands
+
+* `mkdocs new [dir-name]` - Create a new project.
+* `mkdocs serve` - Start the live-reloading docs server.
+* `mkdocs build` - Build the documentation site.
+* `mkdocs help` - Print this help message.
+
+## Project layout
+
+ mkdocs.yml # The configuration file.
+ docs/
+ index.md # The documentation homepage.
+ ... # Other markdown pages, images and other files.
diff --git "a/mkdocs/tests/integration/unicode/docs/\342\231\252.md" "b/mkdocs/tests/integration/unicode/docs/\342\231\252.md"
new file mode 100644
index 0000000000..da37213adb
--- /dev/null
+++ "b/mkdocs/tests/integration/unicode/docs/\342\231\252.md"
@@ -0,0 +1,17 @@
+# Welcome to MkDocs
+
+For full documentation visit [mkdocs.org](http://mkdocs.org).
+
+## Commands
+
+* `mkdocs new [dir-name]` - Create a new project.
+* `mkdocs serve` - Start the live-reloading docs server.
+* `mkdocs build` - Build the documentation site.
+* `mkdocs help` - Print this help message.
+
+## Project layout
+
+ mkdocs.yml # The configuration file.
+ docs/
+ index.md # The documentation homepage.
+ ... # Other markdown pages, images and other files.
diff --git a/mkdocs/tests/integration/unicode/mkdocs.yml b/mkdocs/tests/integration/unicode/mkdocs.yml
new file mode 100644
index 0000000000..c97182f51a
--- /dev/null
+++ b/mkdocs/tests/integration/unicode/mkdocs.yml
@@ -0,0 +1 @@
+site_name: My Docs
diff --git a/mkdocs/utils/__init__.py b/mkdocs/utils/__init__.py
index 9b85fc0de7..be021277d1 100644
--- a/mkdocs/utils/__init__.py
+++ b/mkdocs/utils/__init__.py
@@ -321,6 +321,8 @@ def path_to_url(path):
if os.path.sep == '/':
return path
+ if sys.version_info < (3, 0):
+ path = path.encode('utf8')
return pathname2url(path)
|
bokeh__bokeh-4805 | Update add_glyph docstring
plot.py add_glyph returns GlyphRenderer not Glyph
this tripped me up for a minute
| [
{
"content": "\"\"\" Models for representing top-level plot objects.\n\n\"\"\"\nfrom __future__ import absolute_import\n\nfrom six import string_types\nimport warnings\n\nfrom ..core.query import find\nfrom ..core import validation\nfrom ..core.validation.errors import REQUIRED_RANGE\nfrom ..core.validation.war... | [
{
"content": "\"\"\" Models for representing top-level plot objects.\n\n\"\"\"\nfrom __future__ import absolute_import\n\nfrom six import string_types\nimport warnings\n\nfrom ..core.query import find\nfrom ..core import validation\nfrom ..core.validation.errors import REQUIRED_RANGE\nfrom ..core.validation.war... | diff --git a/bokeh/models/plots.py b/bokeh/models/plots.py
index 86a04895ba0..314cf848c87 100644
--- a/bokeh/models/plots.py
+++ b/bokeh/models/plots.py
@@ -282,7 +282,7 @@ def add_glyph(self, source_or_glyph, glyph=None, **kw):
Glyph initializer.
Returns:
- Glyph
+ GlyphRenderer
'''
if glyph is not None:
|
coqui-ai__TTS-1532 | Missing `f` prefix on f-strings
Some strings looks like they're meant to be f-strings but are missing the `f` prefix meaning variable interpolation won't happen.
https://github.com/coqui-ai/TTS/blob/c410bc58ef3bd07b72ab05d29bbdc2a6df47afea/TTS/tts/layers/tacotron/attentions.py#L487
I found this issue automatically. I'm a bot. Beep Boop 🦊. See other issues I found in your repo [here](https://codereview.doctor/coqui-ai/TTS)
| [
{
"content": "import torch\nfrom scipy.stats import betabinom\nfrom torch import nn\nfrom torch.nn import functional as F\n\nfrom TTS.tts.layers.tacotron.common_layers import Linear\n\n\nclass LocationLayer(nn.Module):\n \"\"\"Layers for Location Sensitive Attention\n\n Args:\n attention_dim (int):... | [
{
"content": "import torch\nfrom scipy.stats import betabinom\nfrom torch import nn\nfrom torch.nn import functional as F\n\nfrom TTS.tts.layers.tacotron.common_layers import Linear\n\n\nclass LocationLayer(nn.Module):\n \"\"\"Layers for Location Sensitive Attention\n\n Args:\n attention_dim (int):... | diff --git a/TTS/tts/layers/tacotron/attentions.py b/TTS/tts/layers/tacotron/attentions.py
index 8c30a00a4a..d8a90d7201 100644
--- a/TTS/tts/layers/tacotron/attentions.py
+++ b/TTS/tts/layers/tacotron/attentions.py
@@ -484,4 +484,4 @@ def init_attn(
beta=0.9,
)
- raise RuntimeError(" [!] Given Attention Type '{attn_type}' is not exist.")
+ raise RuntimeError(f" [!] Given Attention Type '{attn_type}' is not exist.")
|
Lightning-AI__torchmetrics-959 | PSNR - Higher is better.
## 🐛 Bug
`PSNR.higher_is_better` should be `True`
### Additional context
This is a simple change, created [PR#959](https://github.com/PyTorchLightning/metrics/pull/959) with the change.
| [
{
"content": "# Copyright The PyTorch Lightning team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required ... | [
{
"content": "# Copyright The PyTorch Lightning team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required ... | diff --git a/torchmetrics/image/psnr.py b/torchmetrics/image/psnr.py
index ba01b85b533..18b89452045 100644
--- a/torchmetrics/image/psnr.py
+++ b/torchmetrics/image/psnr.py
@@ -70,7 +70,7 @@ class PeakSignalNoiseRatio(Metric):
"""
min_target: Tensor
max_target: Tensor
- higher_is_better = False
+ higher_is_better = True
def __init__(
self,
|
beeware__toga-267 | GTK+: TypeError: on_close() takes 2 positional arguments but 3 were given
When creating a new Window and then closing it a TypeError is created in Linux. Once you have created an App, calling the following will reproduce the error:
window = toga.Window()
window.app = app
window.show()
Then close the window and you get:
TypeError: on_close() takes 2 positional arguments but 3 were given
| [
{
"content": "import asyncio\nimport os\nimport signal\nimport sys\n\ntry:\n import gi\nexcept ImportError:\n # app.py is the first module that will be imported when you import toga_gtk.\n #\n # If Gtk can't be imported, it may be because we're in a virtualenv,\n # and the system python libraries... | [
{
"content": "import asyncio\nimport os\nimport signal\nimport sys\n\ntry:\n import gi\nexcept ImportError:\n # app.py is the first module that will be imported when you import toga_gtk.\n #\n # If Gtk can't be imported, it may be because we're in a virtualenv,\n # and the system python libraries... | diff --git a/src/gtk/toga_gtk/app.py b/src/gtk/toga_gtk/app.py
index 0119ea05fe..765fe10064 100644
--- a/src/gtk/toga_gtk/app.py
+++ b/src/gtk/toga_gtk/app.py
@@ -86,7 +86,7 @@
class MainWindow(Window):
_IMPL_CLASS = Gtk.ApplicationWindow
- def on_close(self, widget):
+ def on_close(self, widget, data):
pass
|
ipython__ipython-9645 | Readlinelike display of options crashes ipython terminal.
| [
{
"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"Setup script for IPython.\n\nUnder Posix environments it works like a typical setup.py script.\nUnder Windows, the command sdist is not supported, since IPython\nrequires utilities which are not available under Windows.\"\"\"\n\n#--------------... | [
{
"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"Setup script for IPython.\n\nUnder Posix environments it works like a typical setup.py script.\nUnder Windows, the command sdist is not supported, since IPython\nrequires utilities which are not available under Windows.\"\"\"\n\n#--------------... | diff --git a/setup.py b/setup.py
index 44975df60fb..4a81a4f3b58 100755
--- a/setup.py
+++ b/setup.py
@@ -196,7 +196,7 @@ def run(self):
'pickleshare',
'simplegeneric>0.8',
'traitlets>=4.2',
- 'prompt_toolkit>=1.0.1,<2.0.0',
+ 'prompt_toolkit>=1.0.3,<2.0.0',
'pygments',
]
|
PyGithub__PyGithub-557 | GitHub Integration raises "NotImplementedError Algorithm not supported"
We have working github integration code using PyGithub v1.32 that does essentially:
```python
integration = github.GithubIntegration(settings.GITHUB_INTEGRATION_ID, settings.GITHUB_INTEGRATION_PRIVATE_PEM)
inst_token = integration.get_access_token(installation_id).token
```
After upgrading to v1.34 this code raises "NotImplementedError Algorithm not supported"
I suspect it has to do with the [switch to pyjwt from python-jose](https://github.com/PyGithub/PyGithub/commit/d447eb13b9f4688a4c981ca03b1b3111fb299142)
| [
{
"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n# ########################## Copyrights and license ############################\n# #\n# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #\n# ... | [
{
"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n# ########################## Copyrights and license ############################\n# #\n# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #\n# ... | diff --git a/doc/introduction.rst b/doc/introduction.rst
index 85ff523cf4..0bf2ea9f30 100644
--- a/doc/introduction.rst
+++ b/doc/introduction.rst
@@ -31,6 +31,9 @@ This package is in the `Python Package Index
be enough. You can also clone it on `Github
<http://github.com/PyGithub/PyGithub>`__.
+If you wish to use GitHub Integrations, you'll want to be sure to install the
+'integrations' option: ``pip install PyGithub['integrations']``
+
Licensing
---------
diff --git a/setup.py b/setup.py
index 7f221382c4..ee1cb00df7 100755
--- a/setup.py
+++ b/setup.py
@@ -96,5 +96,8 @@
use_2to3=True,
install_requires=[
"pyjwt"
- ]
+ ],
+ extras_require = {
+ "integrations": ["cryptography"]
+ }
)
|
mlcommons__GaNDLF-477 | Add histology exception tests
**Is your feature request related to a problem? Please describe.**
Currently, the histology inference pipeline contains a lot of exceptions, but they aren't being tested.
**Describe the solution you'd like**
See title.
**Describe alternatives you've considered**
N.A.
**Additional context**
N.A.
| [
{
"content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport os\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\nwith open(\"README.md\") as readme... | [
{
"content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport os\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\nwith open(\"README.md\") as readme... | diff --git a/.github/workflows/python-test.yml b/.github/workflows/python-test.yml
index de1ee4d91..2d555dfda 100644
--- a/.github/workflows/python-test.yml
+++ b/.github/workflows/python-test.yml
@@ -22,10 +22,11 @@ jobs:
python-version: 3.8
- name: Install dependencies and package
run: |
+ sudo apt-get install libvips -y
python -m pip install --upgrade pip
+ python -m pip install wheel
+ python -m pip install pyvips
python -m pip install openvino-dev==2022.1.0
- python -m pip install torch==1.8.2+cu102 torchvision==0.9.2+cu102 torchaudio===0.8.2 -f https://download.pytorch.org/whl/lts/1.8/torch_lts.html
- $CONDA/bin/conda install -c conda-forge libvips -y
pip3 install torch==1.8.2+cpu torchvision==0.9.2+cpu torchaudio==0.8.2 -f https://download.pytorch.org/whl/lts/1.8/torch_lts.html
pip install -e .
@@ -34,7 +35,10 @@ jobs:
pytest --cov=. --cov-report=xml -k "generic"
- name: Run classification unit tests
run: |
- pytest --cov=. --cov-report=xml --cov-append -k "classification"
+ pytest --cov=. --cov-report=xml --cov-append -k "classification and not histology"
+ - name: Run classification unit tests with histology
+ run: |
+ pytest --cov=. --cov-report=xml --cov-append -k "classification and histology"
- name: Run regression unit tests
run: |
pytest --cov=. --cov-report=xml --cov-append -k "regression"
@@ -44,8 +48,8 @@ jobs:
- name: Run transunet unit tests
run: |
pytest --cov=. --cov-report=xml --cov-append -k "transunet"
-
-
+
+
- name: Upload coverage
uses: codecov/codecov-action@v1
with:
diff --git a/setup.py b/setup.py
index 1db03b25c..f71a3f2fc 100644
--- a/setup.py
+++ b/setup.py
@@ -67,7 +67,7 @@ def run(self):
"tiffslide",
"matplotlib",
"requests>=2.25.0",
- "pyvips",
+ "pyvips==2.2.1",
"pytest",
"coverage",
"pytest-cov",
diff --git a/testing/test_full.py b/testing/test_full.py
index 33afc623f..cefb198bb 100644
--- a/testing/test_full.py
+++ b/testing/test_full.py
@@ -5,6 +5,7 @@
import pandas as pd
from pydicom.data import get_testdata_file
+import pyvips as pv
from GANDLF.data.ImagesFromDataFrame import ImagesFromDataFrame
from GANDLF.utils import *
@@ -1737,8 +1738,125 @@ def test_train_inference_segmentation_histology_2d(device):
print("passed")
+def test_train_inference_classification_histology_large_2d(device):
+ print(
+ "35: Starting histology train/inference classification tests for large images to check exception handling"
+ )
+ # overwrite previous results
+ sanitize_outputDir()
+ output_dir_patches = os.path.join(outputDir, "histo_patches")
+ if os.path.isdir(output_dir_patches):
+ shutil.rmtree(output_dir_patches)
+ Path(output_dir_patches).mkdir(parents=True, exist_ok=True)
+ output_dir_patches_output = os.path.join(output_dir_patches, "histo_patches_output")
+ Path(output_dir_patches_output).mkdir(parents=True, exist_ok=True)
+ file_config_temp = os.path.join(
+ output_dir_patches, "config_patch-extraction_temp.yaml"
+ )
+ # if found in previous run, discard.
+ if os.path.exists(file_config_temp):
+ os.remove(file_config_temp)
+
+ parameters_patch = {}
+ # extracting minimal number of patches to ensure that the test does not take too long
+ parameters_patch["num_patches"] = 3
+ parameters_patch["patch_size"] = [128, 128]
+
+ with open(file_config_temp, "w") as file:
+ yaml.dump(parameters_patch, file)
+
+ # resize the image
+ input_df = pd.read_csv(inputDir + "/train_2d_histo_classification.csv")
+ for _, row in input_df.iterrows():
+ img = pv.Image.new_from_file(row["Channel_0"])
+ img_resize = img.resize(10)
+ new_filename = row["Channel_0"].replace(".tiff", "_resize.tiff")
+ row["Channel_0"] = new_filename
+ img_resize.tiffsave(new_filename)
+
+ input_df.to_csv(inputDir + "/train_2d_histo_classification_resize.csv", index=False)
+
+ patch_extraction(
+ inputDir + "/train_2d_histo_classification_resize.csv",
+ output_dir_patches_output,
+ file_config_temp,
+ )
+
+ file_for_Training = os.path.join(output_dir_patches_output, "opm_train.csv")
+ temp_df = pd.read_csv(file_for_Training)
+ temp_df.drop("Label", axis=1, inplace=True)
+ temp_df["valuetopredict"] = np.random.randint(2, size=6)
+ temp_df.to_csv(file_for_Training, index=False)
+ # read and parse csv
+ parameters = parseConfig(
+ testingDir + "/config_classification.yaml", version_check_flag=False
+ )
+ parameters["modality"] = "histo"
+ parameters["patch_size"] = 128
+ file_config_temp = os.path.join(outputDir, "config_classification_temp.yaml")
+ with open(file_config_temp, "w") as file:
+ yaml.dump(parameters, file)
+ parameters = parseConfig(file_config_temp, version_check_flag=False)
+ parameters["model"]["dimension"] = 2
+ # read and parse csv
+ training_data, parameters["headers"] = parseTrainingCSV(file_for_Training)
+ parameters["model"]["num_channels"] = 3
+ parameters["model"]["architecture"] = "densenet121"
+ parameters["model"]["norm_type"] = "none"
+ parameters["data_preprocessing"]["rgba2rgb"] = ""
+ parameters = populate_header_in_parameters(parameters, parameters["headers"])
+ parameters["nested_training"]["testing"] = 1
+ parameters["nested_training"]["validation"] = -2
+ parameters["model"]["print_summary"] = False
+ modelDir = os.path.join(outputDir, "modelDir")
+ if os.path.isdir(modelDir):
+ shutil.rmtree(modelDir)
+ Path(modelDir).mkdir(parents=True, exist_ok=True)
+ TrainingManager(
+ dataframe=training_data,
+ outputDir=modelDir,
+ parameters=parameters,
+ device=device,
+ resume=False,
+ reset=True,
+ )
+ parameters["output_dir"] = modelDir # this is in inference mode
+ # drop last subject
+ input_df.drop(index=input_df.index[-1], axis=0, inplace=True)
+ input_df.to_csv(inputDir + "/train_2d_histo_classification_resize.csv", index=False)
+ inference_data, parameters["headers"] = parseTrainingCSV(
+ inputDir + "/train_2d_histo_classification_resize.csv", train=False
+ )
+ with pytest.raises(Exception) as exc_info:
+ for model_type in all_model_type:
+ parameters["nested_training"]["testing"] = 1
+ parameters["nested_training"]["validation"] = -2
+ parameters["output_dir"] = modelDir # this is in inference mode
+ inference_data, parameters["headers"] = parseTrainingCSV(
+ inputDir + "/train_2d_histo_segmentation.csv", train=False
+ )
+ parameters["model"]["type"] = model_type
+ InferenceManager(
+ dataframe=inference_data,
+ outputDir=modelDir,
+ parameters=parameters,
+ device=device,
+ )
+ assert (
+ os.path.exists(
+ os.path.join(modelDir, input_df["SubjectID"][0], "predictions.csv")
+ )
+ is True
+ )
+
+ exception_raised = exc_info.value
+ print("Exception raised: ", exception_raised)
+
+ print("passed")
+
+
def test_train_inference_classification_histology_2d(device):
- print("35: Starting histology train/inference classification tests")
+ print("36: Starting histology train/inference classification tests")
# overwrite previous results
sanitize_outputDir()
output_dir_patches = os.path.join(outputDir, "histo_patches")
@@ -1830,7 +1948,7 @@ def test_train_inference_classification_histology_2d(device):
def test_train_segmentation_unet_layerchange_rad_2d(device):
# test case to up code coverage --> test decreasing allowed layers for unet
- print("36: Starting 2D Rad segmentation tests for normtype")
+ print("37: Starting 2D Rad segmentation tests for normtype")
# read and parse csv
# read and initialize parameters for specific data dimension
parameters = parseConfig(
@@ -1877,7 +1995,7 @@ def test_train_segmentation_unet_layerchange_rad_2d(device):
def test_train_segmentation_unetr_rad_3d(device):
- print("37: Testing UNETR for 3D segmentation")
+ print("38: Testing UNETR for 3D segmentation")
parameters = parseConfig(
testingDir + "/config_segmentation.yaml", version_check_flag=False
)
@@ -1933,7 +2051,7 @@ def test_train_segmentation_unetr_rad_3d(device):
def test_train_segmentation_unetr_rad_2d(device):
- print("38: Testing UNETR for 2D segmentation")
+ print("39: Testing UNETR for 2D segmentation")
parameters = parseConfig(
testingDir + "/config_segmentation.yaml", version_check_flag=False
)
@@ -1971,7 +2089,7 @@ def test_train_segmentation_unetr_rad_2d(device):
def test_train_segmentation_transunet_rad_2d(device):
- print("39: Testing TransUNet for 2D segmentation")
+ print("40: Testing TransUNet for 2D segmentation")
parameters = parseConfig(
testingDir + "/config_segmentation.yaml", version_check_flag=False
)
@@ -2020,7 +2138,7 @@ def test_train_segmentation_transunet_rad_2d(device):
def test_train_segmentation_transunet_rad_3d(device):
- print("40: Testing TransUNet for 3D segmentation")
+ print("41: Testing TransUNet for 3D segmentation")
parameters = parseConfig(
testingDir + "/config_segmentation.yaml", version_check_flag=False
)
|
apache__tvm-5851 | [BACKPORT-0.6][BUGFIX] Fixed process termination routine in windows
#4844
| [
{
"content": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n... | [
{
"content": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n... | diff --git a/python/tvm/rpc/server.py b/python/tvm/rpc/server.py
index 3fff5309c45b..3700c824d235 100644
--- a/python/tvm/rpc/server.py
+++ b/python/tvm/rpc/server.py
@@ -39,6 +39,7 @@
import time
import sys
import signal
+import platform
from .._ffi.function import register_func
from .._ffi.base import py_str
|
ietf-tools__datatracker-5409 | AD dashboard should also display trend when current == 0
### Description
https://datatracker.ietf.org/doc/ad/ with the trending arrows would benefit by displaying the trend *even* if the current value is 0.
### Code of Conduct
- [X] I agree to follow the [IETF's Code of Conduct](https://github.com/ietf-tools/.github/blob/main/CODE_OF_CONDUCT.md)
| [
{
"content": "# Copyright The IETF Trust 2009-2022, All Rights Reserved\n# -*- coding: utf-8 -*-\n#\n# Some parts Copyright (C) 2009-2010 Nokia Corporation and/or its subsidiary(-ies).\n# All rights reserved. Contact: Pasi Eronen <pasi.eronen@nokia.com>\n#\n# Redistribution and use in source and binary forms, w... | [
{
"content": "# Copyright The IETF Trust 2009-2022, All Rights Reserved\n# -*- coding: utf-8 -*-\n#\n# Some parts Copyright (C) 2009-2010 Nokia Corporation and/or its subsidiary(-ies).\n# All rights reserved. Contact: Pasi Eronen <pasi.eronen@nokia.com>\n#\n# Redistribution and use in source and binary forms, w... | diff --git a/ietf/doc/views_search.py b/ietf/doc/views_search.py
index ec540ba393..84baeda69a 100644
--- a/ietf/doc/views_search.py
+++ b/ietf/doc/views_search.py
@@ -461,7 +461,7 @@ def ad_dashboard_sort_key(doc):
def ad_workload(request):
- delta = datetime.timedelta(days=30)
+ delta = datetime.timedelta(days=120)
right_now = timezone.now()
ads = []
|
pyinstaller__pyinstaller-2347 | gi._gobject.option is not part of pygobject
The [GObject hook](https://github.com/pyinstaller/pyinstaller/blob/develop/PyInstaller/hooks/hook-gi.repository.GObject.py) adds a `hiddenimport` for `gi._gobject.option` however `gi/_gobject/option.py` is not part of pygobject.
This leads to the following warning when packaging a Gtk application:
```
4813 INFO: Loading module hook "hook-gi.py"...
4818 INFO: Loading module hook "hook-gi.repository.GObject.py"...
4926 INFO: Processing pre-safe import module hook gi.repository.GLib
4963 WARNING: Hidden import "gi._gobject.option" not found!
```
Browsing through the [pygobject git history](https://git.gnome.org/browse/pygobject/), I find commit [8afd7e8](https://git.gnome.org/browse/pygobject/commit/gi/_option.py?id=8afd7e880a72a44e6ea46c763bab82146fd75c96) which moved `gi/_glib/option.py` into `gi/_option.py`
Replacing the `hiddenimport` to `hiddenimports += ['gi._option', 'gi._gobject']` silences the issue. However, I do not yet understand enough about pygobject and pyinstaller to know if this is the right thing to do.
| [
{
"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2005-2016, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License with exception\n# for distributing bootloader.\n#\n# The full license is in the file COPYING.... | [
{
"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2005-2016, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License with exception\n# for distributing bootloader.\n#\n# The full license is in the file COPYING.... | diff --git a/PyInstaller/hooks/hook-gi.repository.GObject.py b/PyInstaller/hooks/hook-gi.repository.GObject.py
index 724c108563..3c9ae7a74e 100644
--- a/PyInstaller/hooks/hook-gi.repository.GObject.py
+++ b/PyInstaller/hooks/hook-gi.repository.GObject.py
@@ -19,4 +19,4 @@
binaries, datas, hiddenimports = get_gi_typelibs('GObject', '2.0')
-hiddenimports += ['gi._gobject.option', 'gi._gobject']
+hiddenimports += ['gi._gobject']
|
statsmodels__statsmodels-507 | numdifftools dependency
Original Launchpad bug 653902: https://bugs.launchpad.net/statsmodels/+bug/653902
Reported by: vincent-vincentdavis (Vincent Davis).
statsmodels/_init_.py imports tsa
Which then returns an exception from statsmodels/tsa/var.py "raise Warning("You need to install numdifftools to try out the AR model")"
Should numdifftools be a dependency for all of statsmodels ?
| [
{
"content": "\"\"\"Base Classes for Likelihood Models in time series analysis\n\nWarning: imports numdifftools\n\n\n\nCreated on Sun Oct 10 15:00:47 2010\n\nAuthor: josef-pktd\nLicense: BSD\n\n\"\"\"\n\nimport numpy as np\n\nimport numdifftools as ndt\n\nfrom statsmodels.base.model import LikelihoodModel\n\n#c... | [
{
"content": "\"\"\"Base Classes for Likelihood Models in time series analysis\n\nWarning: imports numdifftools\n\n\n\nCreated on Sun Oct 10 15:00:47 2010\n\nAuthor: josef-pktd\nLicense: BSD\n\n\"\"\"\n\nimport numpy as np\n\ntry:\n import numdifftools as ndt\nexcept:\n pass\n\nfrom statsmodels.base.model... | diff --git a/statsmodels/tsa/mlemodel.py b/statsmodels/tsa/mlemodel.py
index 1fd064294cf..698e19f5710 100644
--- a/statsmodels/tsa/mlemodel.py
+++ b/statsmodels/tsa/mlemodel.py
@@ -13,7 +13,10 @@
import numpy as np
-import numdifftools as ndt
+try:
+ import numdifftools as ndt
+except:
+ pass
from statsmodels.base.model import LikelihoodModel
|
getsentry__sentry-python-337 | Unified sentry-sdk integration does not have support to add stack trace in python logger using 'stack': True in extra dict.
Migration from raven to unified sentry sdk, affected extended functionalities to python logging provided by raven. _extra_from_record - excludes keywords 'stack' and 'data'. Is there a known workaround?
| [
{
"content": "from __future__ import absolute_import\n\nimport logging\nimport datetime\n\nfrom sentry_sdk.hub import Hub\nfrom sentry_sdk.utils import (\n to_string,\n event_from_exception,\n current_stacktrace,\n capture_internal_exceptions,\n)\nfrom sentry_sdk.integrations import Integration\n\ni... | [
{
"content": "from __future__ import absolute_import\n\nimport logging\nimport datetime\n\nfrom sentry_sdk.hub import Hub\nfrom sentry_sdk.utils import (\n to_string,\n event_from_exception,\n current_stacktrace,\n capture_internal_exceptions,\n)\nfrom sentry_sdk.integrations import Integration\n\ni... | diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py
index a8b02b588e..60fba0dc74 100644
--- a/sentry_sdk/integrations/logging.py
+++ b/sentry_sdk/integrations/logging.py
@@ -106,7 +106,6 @@ def _logging_to_event_level(levelname):
(
"args",
"created",
- "data",
"exc_info",
"exc_text",
"filename",
|
zulip__zulip-18598 | Pivotal integration exception
Hi,
I've added Pivotal integration and from time to time I receive those two e-mails when working in Pivotal:
I'm running ubuntu 20.04
If you need more information, I'd be happy to help.
```
Logger django.request, from module django.utils.log line 224:
Error generated by PivotalMessenger <pivotal-bot@***> (Member) on *** deployment
No stack trace available
Deployed code:
- git: None
- ZULIP_VERSION: 4.2
Request info:
- path: /api/v1/external/pivotal
- POST: {}
- REMOTE_ADDR: "35.184.18.147"
- QUERY_STRING: "api_key=******&stream=******&topic=******"
- SERVER_NAME: ""
```
```
Logger zerver.middleware.json_error_handler, from module zerver.middleware line 450:
Error generated by PivotalMessenger <pivotal-bot@***> (Member) on *** deployment
Traceback (most recent call last):
File "/usr/lib/python3.8/xml/etree/ElementTree.py", line 1693, in feed
self.parser.Parse(data, 0)
xml.parsers.expat.ExpatError: not well-formed (invalid token): line 1, column 0
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./zerver/webhooks/pivotal/view.py", line 172, in api_pivotal_webhook
subject, content = api_pivotal_webhook_v3(request, user_profile)
File "./zerver/webhooks/pivotal/view.py", line 19, in api_pivotal_webhook_v3
payload = xml_fromstring(request.body)
File "/srv/zulip-venv-cache/9d0f5ac272f4e644b222ed65b0b5a996616a215f/zulip-py3-venv/lib/python3.8/site-packages/defusedxml/common.py", line 131, in fromstring
parser.feed(text)
File "/usr/lib/python3.8/xml/etree/ElementTree.py", line 1695, in feed
self._raiseerror(v)
File "/usr/lib/python3.8/xml/etree/ElementTree.py", line 1602, in _raiseerror
raise err
File "<string>", line None
xml.etree.ElementTree.ParseError: not well-formed (invalid token): line 1, column 0
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/srv/zulip-venv-cache/9d0f5ac272f4e644b222ed65b0b5a996616a215f/zulip-py3-venv/lib/python3.8/site-packages/django/core/handlers/base.py", line 181, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/srv/zulip-venv-cache/9d0f5ac272f4e644b222ed65b0b5a996616a215f/zulip-py3-venv/lib/python3.8/site-packages/django/views/decorators/csrf.py", line 54, in wrapped_view
return view_func(*args, **kwargs)
File "./zerver/lib/request.py", line 390, in _wrapped_view_func
return view_func(request, *args, **kwargs)
File "./zerver/decorator.py", line 354, in _wrapped_func_arguments
raise err
File "./zerver/decorator.py", line 334, in _wrapped_func_arguments
return view_func(request, user_profile, *args, **kwargs)
File "./zerver/lib/request.py", line 390, in _wrapped_view_func
return view_func(request, *args, **kwargs)
File "./zerver/webhooks/pivotal/view.py", line 175, in api_pivotal_webhook
subject, content = api_pivotal_webhook_v5(request, user_profile)
File "./zerver/webhooks/pivotal/view.py", line 87, in api_pivotal_webhook_v5
story_url = primary_resources["url"]
KeyError: 'url'
Deployed code:
- git: None
- ZULIP_VERSION: 4.2
Request info:
- path: /api/v1/external/pivotal
- POST: {}
- REMOTE_ADDR: "35.184.18.147"
- QUERY_STRING: "api_key=******&stream=******&topic=******"
- SERVER_NAME: ""
```
| [
{
"content": "\"\"\"Webhooks for external integrations.\"\"\"\nimport re\nfrom typing import Any, Dict, List, Optional, Tuple\n\nimport orjson\nfrom defusedxml.ElementTree import fromstring as xml_fromstring\nfrom django.http import HttpRequest, HttpResponse\nfrom django.utils.translation import gettext as _\n\... | [
{
"content": "\"\"\"Webhooks for external integrations.\"\"\"\nimport re\nfrom typing import Any, Dict, List, Optional, Tuple\n\nimport orjson\nfrom defusedxml.ElementTree import fromstring as xml_fromstring\nfrom django.http import HttpRequest, HttpResponse\nfrom django.utils.translation import gettext as _\n\... | diff --git a/zerver/webhooks/pivotal/view.py b/zerver/webhooks/pivotal/view.py
index 8109449cb2c0e..8c372ba4e1ac4 100644
--- a/zerver/webhooks/pivotal/view.py
+++ b/zerver/webhooks/pivotal/view.py
@@ -72,6 +72,7 @@ def get_text(attrs: List[str]) -> str:
"story_delete_activity",
"story_move_into_project_activity",
"epic_update_activity",
+ "label_create_activity",
]
|
DataDog__dd-trace-py-1724 | botocore gets monkey patched before gevent when using pynamoDB
In [0.43.0 ssl libs are patched on import](https://github.com/DataDog/dd-trace-py/pull/1629) to allow `ddtrace-run` and `gevent` to exist in harmony.
`pynamodb` imports `botocore` and PynamoDB is patched by default. The result of this is that `ddtrace-run` ends up monkey patching `botocore` before `gevent` does.
I believe PynamoDB should be listed in the SSL libs that only get patched on import.
### Which version of dd-trace-py are you using?
0.43.0
### Which version of the libraries are you using?
ddtrace==0.43.0
gevent==20.9.0
greenlet==0.4.17
gunicorn==20.0.4
pynamodb==4.3.3
### How can we reproduce your problem?
1. Create new virtualenv
```
$ mkdir temp
$ cd temp
$ virtualenv .
$ . ./bin/active
```
2. Install libs
```
pip install ddtrace gunicorn[gevent] pynamodb
```
3. Create empty `app.py`
```
import time
while True:
time.sleep(1)
```
Run the failing command
```
ddtrace-run gunicorn -k gevent app
```
The following warning is displayed, which will turn into a SSL recursion error if you try and use urllib3.
```
$ ddtrace-run gunicorn -k gevent app
- DATADOG TRACER DIAGNOSTIC - Agent not reachable. Exception raised: [Errno 61] Connection refused
- DATADOG TRACER DIAGNOSTIC - Agent not reachable. Exception raised: [Errno 61] Connection refused
[2020-10-12 16:46:09 +1100] [69996] [INFO] Starting gunicorn 20.0.4
[2020-10-12 16:46:09 +1100] [69996] [INFO] Listening at: http://127.0.0.1:8000 (69996)
[2020-10-12 16:46:09 +1100] [69996] [INFO] Using worker: gevent
[2020-10-12 16:46:09 +1100] [70004] [INFO] Booting worker with pid: 70004
/private/tmp/venv/lib/python3.7/site-packages/gunicorn/workers/ggevent.py:53: MonkeyPatchWarning: Monkey-patching ssl after ssl has already been imported may lead to errors, including RecursionError on Python 3.6. It may also silently lead to incorrect behaviour on Python 3.7. Please monkey-patch earlier. See https://github.com/gevent/gevent/issues/1016. Modules that had direct imports (NOT patched): ['botocore.httpsession (/private/tmp/venv/lib/python3.7/site-packages/botocore/httpsession.py)', 'urllib3.util.ssl_ (/private/tmp/venv/lib/python3.7/site-packages/urllib3/util/ssl_.py)', 'urllib3.util (/private/tmp/venv/lib/python3.7/site-packages/urllib3/util/__init__.py)'].
monkey.patch_all()
```
Disable pynamodb tracing to fix
```
DD_TRACE_PYNAMODB_ENABLED=False ddtrace-run gunicorn -k gevent app
```
Which gives the following output
```
$ DD_TRACE_PYNAMODB_ENABLED=False ddtrace-run gunicorn -k gevent app
- DATADOG TRACER DIAGNOSTIC - Agent not reachable. Exception raised: [Errno 61] Connection refused
- DATADOG TRACER DIAGNOSTIC - Agent not reachable. Exception raised: [Errno 61] Connection refused
[2020-10-12 16:48:11 +1100] [70038] [INFO] Starting gunicorn 20.0.4
[2020-10-12 16:48:11 +1100] [70038] [INFO] Listening at: http://127.0.0.1:8000 (70038)
[2020-10-12 16:48:11 +1100] [70038] [INFO] Using worker: gevent
[2020-10-12 16:48:11 +1100] [70046] [INFO] Booting worker with pid: 70046
```
| [
{
"content": "\"\"\"Patch libraries to be automatically instrumented.\n\nIt can monkey patch supported standard libraries and third party modules.\nA patched module will automatically report spans with its default configuration.\n\nA library instrumentation can be configured (for instance, to report as another ... | [
{
"content": "\"\"\"Patch libraries to be automatically instrumented.\n\nIt can monkey patch supported standard libraries and third party modules.\nA patched module will automatically report spans with its default configuration.\n\nA library instrumentation can be configured (for instance, to report as another ... | diff --git a/ddtrace/monkey.py b/ddtrace/monkey.py
index 747bdbc599f..bafc5dca323 100644
--- a/ddtrace/monkey.py
+++ b/ddtrace/monkey.py
@@ -85,6 +85,7 @@
"requests": ("requests",),
"botocore": ("botocore",),
"elasticsearch": ("elasticsearch",),
+ "pynamodb": ("pynamodb",),
}
diff --git a/releasenotes/notes/gevent-pynamodb-fix-72ac7017e51fd4f9.yaml b/releasenotes/notes/gevent-pynamodb-fix-72ac7017e51fd4f9.yaml
new file mode 100644
index 00000000000..b37d453e767
--- /dev/null
+++ b/releasenotes/notes/gevent-pynamodb-fix-72ac7017e51fd4f9.yaml
@@ -0,0 +1,4 @@
+---
+fixes:
+ - |
+ Patch pynamodb on import to prevent patching conflicts with gevent.
diff --git a/tests/contrib/gevent/test_tracer.py b/tests/contrib/gevent/test_tracer.py
index ae285ee1494..c4e9166e771 100644
--- a/tests/contrib/gevent/test_tracer.py
+++ b/tests/contrib/gevent/test_tracer.py
@@ -457,6 +457,7 @@ def test_ddtracerun(self):
import botocore # noqa
import requests # noqa
import elasticsearch # noqa
+ import pynamodb # noqa
p = subprocess.Popen(
["ddtrace-run", "python", "tests/contrib/gevent/monkeypatch.py"],
diff --git a/tox.ini b/tox.ini
index 82344b98752..dca7302d4f7 100644
--- a/tox.ini
+++ b/tox.ini
@@ -471,6 +471,7 @@ deps =
sslmodules: botocore
sslmodules: requests
sslmodules: elasticsearch
+ sslmodules: pynamodb
starlette_contrib: httpx
starlette_contrib: pytest-asyncio
starlette_contrib: requests
|
zulip__zulip-13077 | Upgrade pip from 19.1.1 and pip-tools from 3.8.0
Followup issue from #13067. pip-tools 3.9.0 or 4.0.0 fails to resolve dependencies from Git URLs (jazzband/pip-tools#851):
`pip._internal.exceptions.DistributionNotFound: No matching distribution found for zulip==0.6.1_git (from -r requirements/common.in (line 135))`
while pip 19.2 breaks pip-tools 3.8.0 (jazzband/pip-tools#853):
`TypeError: __init__() got an unexpected keyword argument 'find_links'`
| [
{
"content": "import os\n\nZULIP_VERSION = \"2.0.4+git\"\n# Add information on number of commits and commit hash to version, if available\nzulip_git_version_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'zulip-git-version')\nif os.path.exists(zulip_git_version_file):\n with open(zulip_git_v... | [
{
"content": "import os\n\nZULIP_VERSION = \"2.0.4+git\"\n# Add information on number of commits and commit hash to version, if available\nzulip_git_version_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'zulip-git-version')\nif os.path.exists(zulip_git_version_file):\n with open(zulip_git_v... | diff --git a/requirements/dev.in b/requirements/dev.in
index 196613baae52b..6b6a824744952 100644
--- a/requirements/dev.in
+++ b/requirements/dev.in
@@ -50,7 +50,7 @@ transifex-client==0.12.5
python-digitalocean==1.14.0
# Needed for updating the locked pip dependencies
-pip-tools==3.8.0
+pip-tools==4.1.0
# zulip's linting framework - zulint
git+https://github.com/zulip/zulint@aaed679f1ad38b230090eadd3870b7682500f60c#egg=zulint==0.0.1
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 7d63ba635e3c5..9bc6dd787082d 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -102,7 +102,7 @@ phonenumberslite==8.10.15
pickleshare==0.7.5 # via ipython
pika==0.13.0
pillow==6.1.0
-pip-tools==3.8.0
+pip-tools==4.1.0
polib==1.1.0
premailer==3.5.0
prompt-toolkit==1.0.16 # via ipython
@@ -191,5 +191,5 @@ git+https://github.com/zulip/python-zulip-api.git@804501610b6a205334e71b4e441fca
git+https://github.com/zulip/python-zulip-api.git@804501610b6a205334e71b4e441fca60acf650da#egg=zulip_bots==0.6.1+git&subdirectory=zulip_bots
# The following packages are considered to be unsafe in a requirements file:
-pip==19.1.1
+pip==19.2.3
setuptools==41.0.1 # via cfn-lint, ipython, jsonschema, markdown, pyhamcrest, sphinx, zope.interface
diff --git a/requirements/pip.txt b/requirements/pip.txt
index 38d68afa0dded..0678617c8c5c4 100644
--- a/requirements/pip.txt
+++ b/requirements/pip.txt
@@ -1,4 +1,4 @@
# Dependencies for setting up pip to install our requirements.txt file.
-pip==19.1.1
+pip==19.2.3
setuptools==41.0.1
wheel==0.33.4
diff --git a/requirements/prod.txt b/requirements/prod.txt
index c10df59d0d595..3ca42a3212dca 100644
--- a/requirements/prod.txt
+++ b/requirements/prod.txt
@@ -122,5 +122,5 @@ git+https://github.com/zulip/python-zulip-api.git@804501610b6a205334e71b4e441fca
git+https://github.com/zulip/python-zulip-api.git@804501610b6a205334e71b4e441fca60acf650da#egg=zulip_bots==0.6.1+git&subdirectory=zulip_bots
# The following packages are considered to be unsafe in a requirements file:
-pip==19.1.1
+pip==19.2.3
setuptools==41.0.1 # via ipython, markdown
diff --git a/requirements/unupgradable.json b/requirements/unupgradable.json
index bbd10d0a0d749..0e67dfc624a65 100644
--- a/requirements/unupgradable.json
+++ b/requirements/unupgradable.json
@@ -14,12 +14,6 @@
"transifex-client": {
"issue": "https://github.com/zulip/zulip/issues/8914"
},
- "pip": {
- "issue": "https://github.com/zulip/zulip/issues/13067"
- },
- "pip-tools": {
- "issue": "https://github.com/zulip/zulip/issues/13067"
- },
"defusedxml": {
"issue": "https://github.com/zulip/zulip/issues/12191"
},
diff --git a/version.py b/version.py
index 5c3901c63af96..9b2112eac63e3 100644
--- a/version.py
+++ b/version.py
@@ -26,4 +26,4 @@
# historical commits sharing the same major version, in which case a
# minor version bump suffices.
-PROVISION_VERSION = '49.2'
+PROVISION_VERSION = '49.3'
|
wemake-services__wemake-python-styleguide-195 | Fix documentation main page's header
The header is gone:
<img width="1032" alt="2018-10-03 0 18 01" src="https://user-images.githubusercontent.com/4660275/46377643-d0ce1080-c6a1-11e8-950b-d2d0c515dee1.png">
| [
{
"content": "# -*- coding: utf-8 -*-\n\nimport ast\nfrom typing import Optional\n\nfrom wemake_python_styleguide.constants import MAGIC_NUMBERS_WHITELIST\nfrom wemake_python_styleguide.violations.best_practices import (\n MagicNumberViolation,\n)\nfrom wemake_python_styleguide.visitors.base import BaseNodeV... | [
{
"content": "# -*- coding: utf-8 -*-\n\nimport ast\nfrom typing import Optional\n\nfrom wemake_python_styleguide.constants import MAGIC_NUMBERS_WHITELIST\nfrom wemake_python_styleguide.violations.best_practices import (\n MagicNumberViolation,\n)\nfrom wemake_python_styleguide.visitors.base import BaseNodeV... | diff --git a/CHANGELOG.md b/CHANGELOG.md
index ec404f83a..94e94b25f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,6 +9,10 @@ We used to have incremental versioning before `0.1.0`.
### Features
- Now we are counting `async` function as a module member
+- We now forbid to use `credits()` builtin function
+- We now check for `async with` and `async for` nesting level
+- We now count `async` methods as method for classes complexity check
+- We now count `async` functions as functions for module complexity check
### Misc
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 1d82e9987..f2a01e85c 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -77,9 +77,10 @@ Before submitting your code please do the following steps:
2. Add any changes you want
3. Adds tests for the new changes
4. Edit documentation if you have changed something significant
-5. Run `pytest` again to make sure it is still working
-6. Run `mypy` to ensure that types are correct
-7. Run `doc8` to ensure that docs are correct
+5. Update `CHANGELOG.md` with a quick summary of your changes
+6. Run `pytest` again to make sure it is still working
+7. Run `mypy` to ensure that types are correct
+8. Run `doc8` to ensure that docs are correct
## Other help
@@ -87,4 +88,4 @@ Before submitting your code please do the following steps:
You can contribute by spreading a word about this library.
It would also be a huge contribution to write
a short article on how you are using this project.
-What are your best-practices?
+You can also share your best practices with us.
diff --git a/README.md b/README.md
index d199b9498..aa6043abb 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,7 @@
-# wemake-python-styleguide [](https://travis-ci.org/wemake-services/wemake-python-styleguide) [](https://ci.appveyor.com/project/wemake-services/wemake-python-styleguide)
+# wemake-python-styleguide
[](https://wemake.services)
+[](https://travis-ci.org/wemake-services/wemake-python-styleguide) [](https://ci.appveyor.com/project/wemake-services/wemake-python-styleguide)
[](https://coveralls.io/github/wemake-services/wemake-python-styleguide?branch=master)
[](https://badge.fury.io/py/wemake-python-styleguide)
[](https://pypi.org/project/wemake-python-styleguide/)
@@ -88,7 +89,7 @@ We are here not to:
## Contributing
-See [CONTRIBUTING.md](https://github.com/wemake-services/wemake-python-styleguide/blob/master/CONTRIBUTING.md) file if you want to contribute.
+See ["Contributing" section](https://wemake-python-styleguide.readthedocs.io/en/latest/_pages/contributing.html) file in the docs if you want to contribute.
You can also check which [issues need some help](https://github.com/wemake-services/wemake-python-styleguide/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22) right now.
diff --git a/docs/_pages/api.rst b/docs/_pages/api.rst
index ebd29cd4a..ec7007c03 100644
--- a/docs/_pages/api.rst
+++ b/docs/_pages/api.rst
@@ -1,13 +1,12 @@
-API Reference
+Internal Docs
=============
-Internal documentation.
-
Here you can find:
1. How our development process works
2. How to contribute to the project
3. How to write new rules
+4. How our internal API looks like
This information will also be helpful
if you would like to create our own ``flake8`` plugin.
@@ -23,16 +22,20 @@ where we specify all technical details about our workflow and tools.
And finally you will need to go through the API reference.
+Contributing
+------------
+
.. toctree::
:maxdepth: 2
- :caption: Meta:
glossary.rst
contributing.rst
+API Reference
+-------------
+
.. toctree::
:maxdepth: 2
- :caption: API Reference:
checker.rst
visitors/base.rst
diff --git a/docs/_pages/glossary.rst b/docs/_pages/glossary.rst
index cbba02a65..c0275b33d 100644
--- a/docs/_pages/glossary.rst
+++ b/docs/_pages/glossary.rst
@@ -3,6 +3,9 @@
Glossary
========
+First of all, we should speak the same language.
+Here we collect all the specific terms that are used in this project.
+
.. glossary::
plugin
diff --git a/docs/index.rst b/docs/index.rst
index d1ee9cebd..310b5d31b 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,9 +1,5 @@
.. mdinclude:: ../README.md
-
-User guide
------------
-
.. toctree::
:maxdepth: 2
:caption: Userguide:
@@ -13,21 +9,13 @@ User guide
_pages/constants.rst
_pages/options/config.rst
-
-Internal docs
--------------
-
.. toctree::
- :maxdepth: 1
+ :maxdepth: 2
:caption: API:
:hidden:
_pages/api.rst
-
-Changelog
----------
-
.. toctree::
:maxdepth: 1
:caption: Changelog:
diff --git a/wemake_python_styleguide/visitors/ast/numbers.py b/wemake_python_styleguide/visitors/ast/numbers.py
index 30a8859b9..789055b5e 100644
--- a/wemake_python_styleguide/visitors/ast/numbers.py
+++ b/wemake_python_styleguide/visitors/ast/numbers.py
@@ -27,6 +27,7 @@ class MagicNumberVisitor(BaseNodeVisitor):
ast.Tuple,
)
+ # TODO: make consistent naming rules for class attributes:
_PROXY_PARENTS = (
ast.UnaryOp,
)
|
streamlit__streamlit-7454 | A header with Japanese text has no anchor link.
### Summary
I found that a header with Japanese text has no anchor link.
### Steps to reproduce
Code snippet:
```
import streamlit as st
st.header("セクション")
```
1. Run code snippet above.
2. Check if the header has anchor link or not.
**Expected behavior:**
The header ("セクション") has anchor link.
**Actual behavior:**
The header ("セクション") has no anchor link.
### Is this a regression?
No
### Debug info
- Streamlit version: Streamlit, version 1.10.0
- Python version: Python 3.8.10
- Using Conda
- OS version: Ubuntu 20.04.4 LTS
- Browser version: Chrome / Version 104.0.5112.101 (Official Build) (x86_64)
### Additional information
A header with Korean text or Chinese text also has no anchor link.
| [
{
"content": "# Copyright (c) Streamlit Inc. (2018-2022) Snowflake Inc. (2022)\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2... | [
{
"content": "# Copyright (c) Streamlit Inc. (2018-2022) Snowflake Inc. (2022)\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2... | diff --git a/e2e/scripts/st_title.py b/e2e/scripts/st_title.py
index d926ffed0a25..6013875100ae 100644
--- a/e2e/scripts/st_title.py
+++ b/e2e/scripts/st_title.py
@@ -16,3 +16,6 @@
st.title("This title is awesome!")
st.title("This title is awesome too!", anchor="awesome-title")
+
+st.title("日本語タイトル")
+st.title("その他の邦題", anchor="アンカー")
diff --git a/e2e/specs/st_title.spec.js b/e2e/specs/st_title.spec.js
index 7b57c4704d71..e6c1a68c7b4e 100644
--- a/e2e/specs/st_title.spec.js
+++ b/e2e/specs/st_title.spec.js
@@ -19,14 +19,17 @@ describe("st.title", () => {
cy.loadApp("http://localhost:3000/");
});
- it("displays correct number of elements", () => {
- cy.get(".element-container .stMarkdown h1").should("have.length", 2);
+ it("displays correct number of elements & anchor links", () => {
+ cy.get(".element-container .stMarkdown h1").should("have.length", 4);
+ cy.get(".element-container .stMarkdown h1 a").should("have.length", 4);
});
it("displays a title", () => {
cy.get(".element-container .stMarkdown h1").then(els => {
expect(els[0].textContent).to.eq("This title is awesome!");
expect(els[1].textContent).to.eq("This title is awesome too!");
+ expect(els[2].textContent).to.eq("日本語タイトル");
+ expect(els[3].textContent).to.eq("その他の邦題");
});
});
@@ -34,6 +37,8 @@ describe("st.title", () => {
cy.get(".element-container .stMarkdown h1").then(els => {
cy.wrap(els[0]).should("have.attr", "id", "this-title-is-awesome");
cy.wrap(els[1]).should("have.attr", "id", "awesome-title");
+ cy.wrap(els[2]).should("have.attr", "id", "d3b04b7a");
+ cy.wrap(els[3]).should("have.attr", "id", "アンカー");
});
});
});
diff --git a/frontend/lib/src/components/shared/StreamlitMarkdown/StreamlitMarkdown.tsx b/frontend/lib/src/components/shared/StreamlitMarkdown/StreamlitMarkdown.tsx
index 33529d0a9416..d033f28b6f38 100644
--- a/frontend/lib/src/components/shared/StreamlitMarkdown/StreamlitMarkdown.tsx
+++ b/frontend/lib/src/components/shared/StreamlitMarkdown/StreamlitMarkdown.tsx
@@ -53,6 +53,7 @@ import {
} from "./styled-components"
import "katex/dist/katex.min.css"
+import xxhash from "xxhashjs"
import StreamlitSyntaxHighlighter from "@streamlit/lib/src/components/elements/CodeBlock/StreamlitSyntaxHighlighter"
export enum Tags {
@@ -101,12 +102,21 @@ export interface Props {
* Splits the string on non-alphanumeric characters, and joins with a dash.
*/
export function createAnchorFromText(text: string | null): string {
- const newAnchor = text
- ?.toLowerCase()
- .split(/[^A-Za-z0-9]/)
- .filter(Boolean)
- .join("-")
- return newAnchor || ""
+ let newAnchor = ""
+ // Check if the text is valid ASCII characters - necessary for fully functional anchors (issue #5291)
+ const isASCII = text && /^[\x00-\x7F]*$/.test(text)
+
+ if (isASCII) {
+ newAnchor = text
+ ?.toLowerCase()
+ .split(/[^\p{L}\p{N}]+/gu) // split on non-alphanumeric characters
+ .filter(Boolean) // filter out falsy values using Boolean constructor
+ .join("-")
+ } else if (text) {
+ // if the text is not valid ASCII, use a hash of the text
+ newAnchor = xxhash.h32(text, 0xabcd).toString(16)
+ }
+ return newAnchor
}
// Note: React markdown limits hrefs to specific protocols ('http', 'https',
|
pwndbg__pwndbg-628 | Don't activate the IDA view when stepping in the pwndbg
<!--
Before reporting a new issue, make sure that we do not have any duplicates already open.
If there is one it might be good to take part in the discussion there.
Please make sure you have checked that the issue persists on LATEST pwndbg version.
Below is a template for BUG REPORTS.
Don't include it if this is a FEATURE REQUEST.
-->
### Description
<!--
Briefly describe the problem you are having in a few paragraphs.
-->
By default the pwndbg will execute Jump(ea) function when we step to a new instruction. However the idc.Jump(ea) function calls the
`inline bool jumpto(ea_t ea, int opnum=-1, int uijmp_flags=UIJMP_ACTIVATE)`
internally with the default parameter. You can see the UIJMP_ACTIVATE is the default flag and this will instruct the IDA to activate the target view, which will grab the focus (at least on Windows)
Because of this, every time I step in the pwndbg, the focus will shifted into IDA and I have to focus back to pwndbg manually which is quite annoying
### Steps to reproduce
<!--
What do we have to do to reproduce the problem?
If this is connected to particular C/asm code,
please provide the smallest C code that reproduces the issue.
-->
Use the pwndbg with IDA Pro and then do stepping in pwndbg
### My setup
<!--
Show us your gdb/python/pwndbg/OS/IDA Pro version (depending on your case).
NOTE: We are currently supporting only Ubuntu installations.
It is known that pwndbg is not fully working e.g. on Arch Linux (the heap stuff is not working there).
If you would like to change this situation - help us improving pwndbg and supporting other distros!
This can be displayed in pwndbg through `version` command.
If it is somehow unavailable, use:
* `show version` - for gdb
* `py import sys; print(sys.version)` - for python
* pwndbg version/git commit id
-->
newest pwndbg with IDA Pro 7.2 on Windows
### Possible Fix
We can change the L230 of pwndbg/ida.py
from
```return _ida.Jump(addr)```
to
```return _ida.jumpto(addr, -1, 0)```
which means we manually set the flag to 0, instead of UIJMP_ACTIVATE.
| [
{
"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\nTalks to an XMLRPC server running inside of an active IDA Pro instance,\nin order to query it about the database. Allows symbol resolution and\ninteractive debugging.\n\"\"\"\nfrom __future__ import absolute_import\nfrom __future__ import di... | [
{
"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\nTalks to an XMLRPC server running inside of an active IDA Pro instance,\nin order to query it about the database. Allows symbol resolution and\ninteractive debugging.\n\"\"\"\nfrom __future__ import absolute_import\nfrom __future__ import di... | diff --git a/pwndbg/ida.py b/pwndbg/ida.py
index 514a98ad14f..775b340329f 100644
--- a/pwndbg/ida.py
+++ b/pwndbg/ida.py
@@ -227,7 +227,8 @@ def here():
@withIDA
@takes_address
def Jump(addr):
- return _ida.Jump(addr)
+ # uses C++ api instead of idc one to avoid activating the IDA window
+ return _ida.jumpto(addr, -1, 0)
@withIDA
|
certbot__certbot-9331 | Remove the third-party `mock` dependency
>Functions in certbot.tests.util were previously using the third party mock module if it was available for backwards compatibiliy. This behavior will be removed and unittest.mock from the standard library will always be used instead.
e.g. https://github.com/certbot/certbot/blob/5c111d0bd1206d864d7cb93754e101f6073bc669/certbot/certbot/tests/util.py#L38-L50
| [
{
"content": "import codecs\nimport os\nimport re\nimport sys\n\nfrom pkg_resources import parse_version\nfrom setuptools import __version__ as setuptools_version\nfrom setuptools import find_packages\nfrom setuptools import setup\n\nmin_setuptools_version='41.6.0'\n# This conditional isn't necessary, but it pr... | [
{
"content": "import codecs\nimport os\nimport re\nimport sys\n\nfrom pkg_resources import parse_version\nfrom setuptools import __version__ as setuptools_version\nfrom setuptools import find_packages\nfrom setuptools import setup\n\nmin_setuptools_version='41.6.0'\n# This conditional isn't necessary, but it pr... | diff --git a/.azure-pipelines/templates/jobs/extended-tests-jobs.yml b/.azure-pipelines/templates/jobs/extended-tests-jobs.yml
index 7c586ee5bca..0f732e6c909 100644
--- a/.azure-pipelines/templates/jobs/extended-tests-jobs.yml
+++ b/.azure-pipelines/templates/jobs/extended-tests-jobs.yml
@@ -18,8 +18,6 @@ jobs:
PYTHON_VERSION: 3.7
TOXENV: py37
CERTBOT_NO_PIN: 1
- linux-external-mock:
- TOXENV: external-mock
linux-boulder-v2-integration-certbot-oldest:
PYTHON_VERSION: 3.7
TOXENV: integration-certbot-oldest
diff --git a/certbot-apache/tests/augeasnode_test.py b/certbot-apache/tests/augeasnode_test.py
index 1e11b5eb3f4..591634d359c 100644
--- a/certbot-apache/tests/augeasnode_test.py
+++ b/certbot-apache/tests/augeasnode_test.py
@@ -1,13 +1,9 @@
"""Tests for AugeasParserNode classes"""
from typing import List
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
-
import os
import util
+from unittest import mock
from certbot import errors
diff --git a/certbot-apache/tests/autohsts_test.py b/certbot-apache/tests/autohsts_test.py
index 664d791bd78..70ed2ca1a6a 100644
--- a/certbot-apache/tests/autohsts_test.py
+++ b/certbot-apache/tests/autohsts_test.py
@@ -2,11 +2,7 @@
"""Test for certbot_apache._internal.configurator AutoHSTS functionality"""
import re
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from certbot import errors
from certbot_apache._internal import constants
diff --git a/certbot-apache/tests/centos_test.py b/certbot-apache/tests/centos_test.py
index c9a82046629..cc295266fd6 100644
--- a/certbot-apache/tests/centos_test.py
+++ b/certbot-apache/tests/centos_test.py
@@ -1,10 +1,6 @@
"""Test for certbot_apache._internal.configurator for Centos overrides"""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from certbot import errors
from certbot.compat import filesystem
diff --git a/certbot-apache/tests/configurator_reverter_test.py b/certbot-apache/tests/configurator_reverter_test.py
index 72b8fe2bd0d..fe0dfb39dff 100644
--- a/certbot-apache/tests/configurator_reverter_test.py
+++ b/certbot-apache/tests/configurator_reverter_test.py
@@ -1,11 +1,7 @@
"""Test for certbot_apache._internal.configurator implementations of reverter"""
import shutil
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from certbot import errors
import util
diff --git a/certbot-apache/tests/configurator_test.py b/certbot-apache/tests/configurator_test.py
index 566907506e3..0978b302e29 100644
--- a/certbot-apache/tests/configurator_test.py
+++ b/certbot-apache/tests/configurator_test.py
@@ -5,11 +5,7 @@
import socket
import tempfile
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from acme import challenges
from certbot import achallenges
diff --git a/certbot-apache/tests/debian_test.py b/certbot-apache/tests/debian_test.py
index 2bbf403124f..facc6510798 100644
--- a/certbot-apache/tests/debian_test.py
+++ b/certbot-apache/tests/debian_test.py
@@ -1,11 +1,7 @@
"""Test for certbot_apache._internal.configurator for Debian overrides"""
import shutil
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from certbot import errors
from certbot.compat import os
diff --git a/certbot-apache/tests/display_ops_test.py b/certbot-apache/tests/display_ops_test.py
index 50ab6bfc723..26927ffadba 100644
--- a/certbot-apache/tests/display_ops_test.py
+++ b/certbot-apache/tests/display_ops_test.py
@@ -1,10 +1,6 @@
"""Test certbot_apache._internal.display_ops."""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from certbot import errors
from certbot.display import util as display_util
diff --git a/certbot-apache/tests/dualnode_test.py b/certbot-apache/tests/dualnode_test.py
index 83a5729a55f..a3e28d09e34 100644
--- a/certbot-apache/tests/dualnode_test.py
+++ b/certbot-apache/tests/dualnode_test.py
@@ -1,10 +1,6 @@
"""Tests for DualParserNode implementation"""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from certbot_apache._internal import assertions
from certbot_apache._internal import augeasparser
diff --git a/certbot-apache/tests/entrypoint_test.py b/certbot-apache/tests/entrypoint_test.py
index 2a269441535..0b9644f09a4 100644
--- a/certbot-apache/tests/entrypoint_test.py
+++ b/certbot-apache/tests/entrypoint_test.py
@@ -1,10 +1,6 @@
"""Test for certbot_apache._internal.entrypoint for override class resolution"""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from certbot_apache._internal import configurator
from certbot_apache._internal import entrypoint
diff --git a/certbot-apache/tests/fedora_test.py b/certbot-apache/tests/fedora_test.py
index fca3c4ba451..4ff704aafe1 100644
--- a/certbot-apache/tests/fedora_test.py
+++ b/certbot-apache/tests/fedora_test.py
@@ -1,10 +1,6 @@
"""Test for certbot_apache._internal.configurator for Fedora 29+ overrides"""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from certbot import errors
from certbot.compat import filesystem
diff --git a/certbot-apache/tests/gentoo_test.py b/certbot-apache/tests/gentoo_test.py
index 25f9e929bd2..4df46e70f9f 100644
--- a/certbot-apache/tests/gentoo_test.py
+++ b/certbot-apache/tests/gentoo_test.py
@@ -1,10 +1,6 @@
"""Test for certbot_apache._internal.configurator for Gentoo overrides"""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from certbot import errors
from certbot.compat import filesystem
diff --git a/certbot-apache/tests/http_01_test.py b/certbot-apache/tests/http_01_test.py
index 65dfb6344b4..fe5b69b33e9 100644
--- a/certbot-apache/tests/http_01_test.py
+++ b/certbot-apache/tests/http_01_test.py
@@ -2,11 +2,7 @@
import unittest
import errno
from typing import List
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from acme import challenges
from certbot import achallenges
diff --git a/certbot-apache/tests/parser_test.py b/certbot-apache/tests/parser_test.py
index 31a769ddd0d..89633ae4774 100644
--- a/certbot-apache/tests/parser_test.py
+++ b/certbot-apache/tests/parser_test.py
@@ -1,11 +1,7 @@
"""Tests for certbot_apache._internal.parser."""
import shutil
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from certbot import errors
from certbot.compat import os
diff --git a/certbot-apache/tests/parsernode_configurator_test.py b/certbot-apache/tests/parsernode_configurator_test.py
index ebeda3c37ba..6c153acc4f8 100644
--- a/certbot-apache/tests/parsernode_configurator_test.py
+++ b/certbot-apache/tests/parsernode_configurator_test.py
@@ -1,10 +1,6 @@
"""Tests for ApacheConfigurator for AugeasParserNode classes"""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
import util
diff --git a/certbot-apache/tests/util.py b/certbot-apache/tests/util.py
index a4191b3fec8..cf97d9e7a18 100644
--- a/certbot-apache/tests/util.py
+++ b/certbot-apache/tests/util.py
@@ -4,11 +4,7 @@
import augeas
import josepy as jose
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from certbot.compat import os
from certbot.plugins import common
diff --git a/certbot-dns-cloudflare/tests/dns_cloudflare_test.py b/certbot-dns-cloudflare/tests/dns_cloudflare_test.py
index 2b182783103..cd73adc8fa0 100644
--- a/certbot-dns-cloudflare/tests/dns_cloudflare_test.py
+++ b/certbot-dns-cloudflare/tests/dns_cloudflare_test.py
@@ -1,12 +1,9 @@
"""Tests for certbot_dns_cloudflare._internal.dns_cloudflare."""
import unittest
+from unittest import mock
import CloudFlare
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
from certbot import errors
from certbot.compat import os
diff --git a/certbot-dns-digitalocean/tests/dns_digitalocean_test.py b/certbot-dns-digitalocean/tests/dns_digitalocean_test.py
index 4683893e80e..8fdee38f37d 100644
--- a/certbot-dns-digitalocean/tests/dns_digitalocean_test.py
+++ b/certbot-dns-digitalocean/tests/dns_digitalocean_test.py
@@ -1,12 +1,9 @@
"""Tests for certbot_dns_digitalocean._internal.dns_digitalocean."""
import unittest
+from unittest import mock
import digitalocean
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
from certbot import errors
from certbot.compat import os
diff --git a/certbot-dns-dnsimple/tests/dns_dnsimple_test.py b/certbot-dns-dnsimple/tests/dns_dnsimple_test.py
index fc3dc5b1f26..0e28f43b2cf 100644
--- a/certbot-dns-dnsimple/tests/dns_dnsimple_test.py
+++ b/certbot-dns-dnsimple/tests/dns_dnsimple_test.py
@@ -1,11 +1,8 @@
"""Tests for certbot_dns_dnsimple._internal.dns_dnsimple."""
import unittest
+from unittest import mock
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
from requests.exceptions import HTTPError
from certbot.compat import os
diff --git a/certbot-dns-dnsmadeeasy/tests/dns_dnsmadeeasy_test.py b/certbot-dns-dnsmadeeasy/tests/dns_dnsmadeeasy_test.py
index a04716d95f1..46f5895a871 100644
--- a/certbot-dns-dnsmadeeasy/tests/dns_dnsmadeeasy_test.py
+++ b/certbot-dns-dnsmadeeasy/tests/dns_dnsmadeeasy_test.py
@@ -1,11 +1,8 @@
"""Tests for certbot_dns_dnsmadeeasy._internal.dns_dnsmadeeasy."""
import unittest
+from unittest import mock
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
from requests.exceptions import HTTPError
from certbot.compat import os
diff --git a/certbot-dns-gehirn/tests/dns_gehirn_test.py b/certbot-dns-gehirn/tests/dns_gehirn_test.py
index 1310f74ca9b..b982e3e1b7a 100644
--- a/certbot-dns-gehirn/tests/dns_gehirn_test.py
+++ b/certbot-dns-gehirn/tests/dns_gehirn_test.py
@@ -1,11 +1,8 @@
"""Tests for certbot_dns_gehirn._internal.dns_gehirn."""
import unittest
+from unittest import mock
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
from requests.exceptions import HTTPError
from certbot.compat import os
diff --git a/certbot-dns-google/tests/dns_google_test.py b/certbot-dns-google/tests/dns_google_test.py
index b6f63a937f9..27e8b1a653c 100644
--- a/certbot-dns-google/tests/dns_google_test.py
+++ b/certbot-dns-google/tests/dns_google_test.py
@@ -6,10 +6,8 @@
from googleapiclient.errors import Error
from googleapiclient.http import HttpMock
from httplib2 import ServerNotFoundError
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+
+from unittest import mock
from certbot import errors
from certbot.compat import os
diff --git a/certbot-dns-linode/tests/dns_linode_test.py b/certbot-dns-linode/tests/dns_linode_test.py
index d0d6ceb039f..c227ef4b5db 100644
--- a/certbot-dns-linode/tests/dns_linode_test.py
+++ b/certbot-dns-linode/tests/dns_linode_test.py
@@ -1,11 +1,7 @@
"""Tests for certbot_dns_linode._internal.dns_linode."""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from certbot import errors
from certbot.compat import os
diff --git a/certbot-dns-luadns/tests/dns_luadns_test.py b/certbot-dns-luadns/tests/dns_luadns_test.py
index 7592e2323bd..3c1ac68418d 100644
--- a/certbot-dns-luadns/tests/dns_luadns_test.py
+++ b/certbot-dns-luadns/tests/dns_luadns_test.py
@@ -1,11 +1,8 @@
"""Tests for certbot_dns_luadns._internal.dns_luadns."""
import unittest
+from unittest import mock
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
from requests.exceptions import HTTPError
from certbot.compat import os
diff --git a/certbot-dns-nsone/tests/dns_nsone_test.py b/certbot-dns-nsone/tests/dns_nsone_test.py
index 3754f98114b..13ea09b3df8 100644
--- a/certbot-dns-nsone/tests/dns_nsone_test.py
+++ b/certbot-dns-nsone/tests/dns_nsone_test.py
@@ -1,11 +1,8 @@
"""Tests for certbot_dns_nsone._internal.dns_nsone."""
import unittest
+from unittest import mock
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
from requests.exceptions import HTTPError
from certbot.compat import os
diff --git a/certbot-dns-ovh/tests/dns_ovh_test.py b/certbot-dns-ovh/tests/dns_ovh_test.py
index 7f93967eb59..7eb767b7007 100644
--- a/certbot-dns-ovh/tests/dns_ovh_test.py
+++ b/certbot-dns-ovh/tests/dns_ovh_test.py
@@ -1,11 +1,8 @@
"""Tests for certbot_dns_ovh._internal.dns_ovh."""
import unittest
+from unittest import mock
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
from requests.exceptions import HTTPError
from certbot.compat import os
diff --git a/certbot-dns-rfc2136/tests/dns_rfc2136_test.py b/certbot-dns-rfc2136/tests/dns_rfc2136_test.py
index d0434aef558..1f91d3cb666 100644
--- a/certbot-dns-rfc2136/tests/dns_rfc2136_test.py
+++ b/certbot-dns-rfc2136/tests/dns_rfc2136_test.py
@@ -1,14 +1,11 @@
"""Tests for certbot_dns_rfc2136._internal.dns_rfc2136."""
import unittest
+from unittest import mock
import dns.flags
import dns.rcode
import dns.tsig
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
from certbot import errors
from certbot.compat import os
diff --git a/certbot-dns-route53/tests/dns_route53_test.py b/certbot-dns-route53/tests/dns_route53_test.py
index 69b6b115d4c..bdc70e04887 100644
--- a/certbot-dns-route53/tests/dns_route53_test.py
+++ b/certbot-dns-route53/tests/dns_route53_test.py
@@ -1,13 +1,10 @@
"""Tests for certbot_dns_route53._internal.dns_route53.Authenticator"""
import unittest
+from unittest import mock
from botocore.exceptions import ClientError
from botocore.exceptions import NoCredentialsError
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
from certbot import errors
from certbot.compat import os
diff --git a/certbot-dns-sakuracloud/tests/dns_sakuracloud_test.py b/certbot-dns-sakuracloud/tests/dns_sakuracloud_test.py
index 1c64df3729b..a1abf7b783a 100644
--- a/certbot-dns-sakuracloud/tests/dns_sakuracloud_test.py
+++ b/certbot-dns-sakuracloud/tests/dns_sakuracloud_test.py
@@ -1,11 +1,8 @@
"""Tests for certbot_dns_sakuracloud._internal.dns_sakuracloud."""
import unittest
+from unittest import mock
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
from requests.exceptions import HTTPError
from certbot.compat import os
diff --git a/certbot-nginx/tests/configurator_test.py b/certbot-nginx/tests/configurator_test.py
index a182f789a5c..916dfe3f54d 100644
--- a/certbot-nginx/tests/configurator_test.py
+++ b/certbot-nginx/tests/configurator_test.py
@@ -1,10 +1,7 @@
"""Test for certbot_nginx._internal.configurator."""
import unittest
+from unittest import mock
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
import OpenSSL
from acme import challenges
diff --git a/certbot-nginx/tests/http_01_test.py b/certbot-nginx/tests/http_01_test.py
index b9917af3577..05be062029e 100644
--- a/certbot-nginx/tests/http_01_test.py
+++ b/certbot-nginx/tests/http_01_test.py
@@ -1,11 +1,8 @@
"""Tests for certbot_nginx._internal.http_01"""
import unittest
+from unittest import mock
import josepy as jose
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
from acme import challenges
from certbot import achallenges
diff --git a/certbot-nginx/tests/parser_obj_test.py b/certbot-nginx/tests/parser_obj_test.py
index 4d1f2527743..60ff1c97550 100644
--- a/certbot-nginx/tests/parser_obj_test.py
+++ b/certbot-nginx/tests/parser_obj_test.py
@@ -1,11 +1,7 @@
""" Tests for functions and classes in parser_obj.py """
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
from certbot_nginx._internal.parser_obj import COMMENT_BLOCK
from certbot_nginx._internal.parser_obj import parse_raw
diff --git a/certbot-nginx/tests/test_util.py b/certbot-nginx/tests/test_util.py
index 6cc701f42c8..1ac649318bf 100644
--- a/certbot-nginx/tests/test_util.py
+++ b/certbot-nginx/tests/test_util.py
@@ -4,10 +4,7 @@
import tempfile
import josepy as jose
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
+from unittest import mock
import pkg_resources
from certbot import util
diff --git a/certbot/certbot/plugins/dns_test_common.py b/certbot/certbot/plugins/dns_test_common.py
index a2ab84dcb4e..65c9cc2c81d 100644
--- a/certbot/certbot/plugins/dns_test_common.py
+++ b/certbot/certbot/plugins/dns_test_common.py
@@ -5,6 +5,7 @@
import configobj
import josepy as jose
+from unittest import mock
from acme import challenges
from certbot import achallenges
@@ -19,12 +20,6 @@
Protocol = object
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
-
-
DOMAIN = 'example.com'
KEY = jose.JWKRSA.load(test_util.load_vector("rsa512_key.pem"))
diff --git a/certbot/certbot/plugins/dns_test_common_lexicon.py b/certbot/certbot/plugins/dns_test_common_lexicon.py
index 01f4c6d619f..3710404046b 100644
--- a/certbot/certbot/plugins/dns_test_common_lexicon.py
+++ b/certbot/certbot/plugins/dns_test_common_lexicon.py
@@ -1,6 +1,7 @@
"""Base test class for DNS authenticators built on Lexicon."""
from typing import Any
from typing import TYPE_CHECKING
+from unittest import mock
from unittest.mock import MagicMock
import josepy as jose
@@ -14,10 +15,6 @@
from certbot.plugins.dns_test_common import _AuthenticatorCallableTestCase
from certbot.tests import util as test_util
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
if TYPE_CHECKING:
from typing_extensions import Protocol
else:
diff --git a/certbot/certbot/tests/util.py b/certbot/certbot/tests/util.py
index dbff31a14de..0ef5f654c14 100644
--- a/certbot/certbot/tests/util.py
+++ b/certbot/certbot/tests/util.py
@@ -16,7 +16,7 @@
from typing import List
from typing import Optional
import unittest
-import warnings
+from unittest import mock
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
@@ -35,20 +35,6 @@
from certbot.display import util as display_util
from certbot.plugins import common
-try:
- # When we remove this deprecated import, we should also remove the
- # "external-mock" test environment and the mock dependency listed in
- # tools/pinning/pyproject.toml.
- import mock
- warnings.warn(
- "The external mock module is being used for backwards compatibility "
- "since it is available, however, future versions of Certbot's tests will "
- "use unittest.mock. Be sure to update your code accordingly.",
- PendingDeprecationWarning
- )
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
-
class DummyInstaller(common.Installer):
"""Dummy installer plugin for test purpose."""
diff --git a/certbot/setup.py b/certbot/setup.py
index 600087d7f27..8ea0b0e6f30 100644
--- a/certbot/setup.py
+++ b/certbot/setup.py
@@ -90,7 +90,6 @@ def read_file(filename, encoding='utf8'):
'pytest-xdist',
'setuptools',
'tox',
- 'types-mock',
'types-pyOpenSSL',
'types-pyRFC3339',
'types-pytz',
diff --git a/certbot/tests/account_test.py b/certbot/tests/account_test.py
index e034c5f32f2..0037de31e74 100644
--- a/certbot/tests/account_test.py
+++ b/certbot/tests/account_test.py
@@ -4,10 +4,7 @@
import unittest
import josepy as jose
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
import pytz
from acme import messages
diff --git a/certbot/tests/auth_handler_test.py b/certbot/tests/auth_handler_test.py
index ba0323c3e58..23d5b2ae2e6 100644
--- a/certbot/tests/auth_handler_test.py
+++ b/certbot/tests/auth_handler_test.py
@@ -4,10 +4,7 @@
import unittest
from josepy import b64encode
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from acme import challenges
from acme import client as acme_client
diff --git a/certbot/tests/cert_manager_test.py b/certbot/tests/cert_manager_test.py
index 0ed09eccddb..157d45b5512 100644
--- a/certbot/tests/cert_manager_test.py
+++ b/certbot/tests/cert_manager_test.py
@@ -7,10 +7,7 @@
import unittest
import configobj
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from certbot import errors, configuration
from certbot._internal.storage import ALL_FOUR
diff --git a/certbot/tests/cli_test.py b/certbot/tests/cli_test.py
index 82138f52d59..54abe2594c5 100644
--- a/certbot/tests/cli_test.py
+++ b/certbot/tests/cli_test.py
@@ -5,6 +5,7 @@
import io
import tempfile
import unittest
+from unittest import mock
from acme import challenges
from certbot import errors
@@ -16,11 +17,6 @@
import certbot.tests.util as test_util
from certbot.tests.util import TempDirTestCase
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
PLUGINS = disco.PluginsRegistry.find_all()
diff --git a/certbot/tests/client_test.py b/certbot/tests/client_test.py
index 70f46aee79a..6b430831f89 100644
--- a/certbot/tests/client_test.py
+++ b/certbot/tests/client_test.py
@@ -5,6 +5,7 @@
import shutil
import tempfile
import unittest
+from unittest import mock
from unittest.mock import MagicMock
from josepy import interfaces
@@ -17,11 +18,6 @@
from certbot.compat import os
import certbot.tests.util as test_util
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
KEY = test_util.load_vector("rsa512_key.pem")
CSR_SAN = test_util.load_vector("csr-san_512.pem")
diff --git a/certbot/tests/compat/filesystem_test.py b/certbot/tests/compat/filesystem_test.py
index 9aab49c34fa..a9a258ba290 100644
--- a/certbot/tests/compat/filesystem_test.py
+++ b/certbot/tests/compat/filesystem_test.py
@@ -2,11 +2,7 @@
import contextlib
import errno
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from certbot import util
from certbot._internal import lock
diff --git a/certbot/tests/compat/misc_test.py b/certbot/tests/compat/misc_test.py
index 2155bd5a049..5cb8167b6ba 100644
--- a/certbot/tests/compat/misc_test.py
+++ b/certbot/tests/compat/misc_test.py
@@ -1,10 +1,6 @@
"""Tests for certbot.compat.misc"""
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock # type: ignore
import unittest
-import warnings
+from unittest import mock
from certbot.compat import os
diff --git a/certbot/tests/configuration_test.py b/certbot/tests/configuration_test.py
index 1c122615bd5..61c902bc912 100644
--- a/certbot/tests/configuration_test.py
+++ b/certbot/tests/configuration_test.py
@@ -1,10 +1,6 @@
"""Tests for certbot.configuration."""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from certbot import errors
from certbot._internal import constants
diff --git a/certbot/tests/crypto_util_test.py b/certbot/tests/crypto_util_test.py
index 9a111a0ebf3..3031cf531f9 100644
--- a/certbot/tests/crypto_util_test.py
+++ b/certbot/tests/crypto_util_test.py
@@ -1,11 +1,8 @@
"""Tests for certbot.crypto_util."""
import logging
import unittest
+from unittest import mock
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
import OpenSSL
from certbot import errors
diff --git a/certbot/tests/display/completer_test.py b/certbot/tests/display/completer_test.py
index a6ada8b9acd..73722151a4f 100644
--- a/certbot/tests/display/completer_test.py
+++ b/certbot/tests/display/completer_test.py
@@ -9,17 +9,12 @@
import string
import sys
import unittest
+from unittest import mock
from certbot.compat import filesystem
from certbot.compat import os
import certbot.tests.util as test_util
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
-
class CompleterTest(test_util.TempDirTestCase):
"""Test certbot._internal.display.completer.Completer."""
diff --git a/certbot/tests/display/internal_util_test.py b/certbot/tests/display/internal_util_test.py
index 86489b6a51b..b29396c415b 100644
--- a/certbot/tests/display/internal_util_test.py
+++ b/certbot/tests/display/internal_util_test.py
@@ -3,15 +3,11 @@
import socket
import tempfile
import unittest
+from unittest import mock
from acme import messages as acme_messages
from certbot import errors
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
class WrapLinesTest(unittest.TestCase):
def test_wrap_lines(self):
diff --git a/certbot/tests/display/ops_test.py b/certbot/tests/display/ops_test.py
index e00eeb08620..1235190a7f7 100644
--- a/certbot/tests/display/ops_test.py
+++ b/certbot/tests/display/ops_test.py
@@ -2,6 +2,7 @@
"""Test certbot.display.ops."""
import sys
import unittest
+from unittest import mock
import josepy as jose
@@ -15,11 +16,6 @@
from certbot.display import util as display_util
import certbot.tests.util as test_util
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
KEY = jose.JWKRSA.load(test_util.load_vector("rsa512_key.pem"))
diff --git a/certbot/tests/display/util_test.py b/certbot/tests/display/util_test.py
index 7985de753c7..7eb45653c5e 100644
--- a/certbot/tests/display/util_test.py
+++ b/certbot/tests/display/util_test.py
@@ -3,15 +3,11 @@
import socket
import tempfile
import unittest
+from unittest import mock
from certbot import errors
import certbot.tests.util as test_util
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
class NotifyTest(unittest.TestCase):
"""Tests for certbot.display.util.notify"""
diff --git a/certbot/tests/eff_test.py b/certbot/tests/eff_test.py
index c61f183cb11..6a8ac2c6110 100644
--- a/certbot/tests/eff_test.py
+++ b/certbot/tests/eff_test.py
@@ -1,11 +1,8 @@
"""Tests for certbot._internal.eff."""
import datetime
import unittest
+from unittest import mock
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
import josepy
import pytz
import requests
diff --git a/certbot/tests/error_handler_test.py b/certbot/tests/error_handler_test.py
index 010a756c12b..d6d506956bc 100644
--- a/certbot/tests/error_handler_test.py
+++ b/certbot/tests/error_handler_test.py
@@ -6,15 +6,10 @@
from typing import Dict
from typing import Union
import unittest
+from unittest import mock
from certbot.compat import os
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
-
def get_signals(signums):
"""Get the handlers for an iterable of signums."""
diff --git a/certbot/tests/errors_test.py b/certbot/tests/errors_test.py
index 792868df0c2..d05f2b43ea9 100644
--- a/certbot/tests/errors_test.py
+++ b/certbot/tests/errors_test.py
@@ -1,10 +1,6 @@
"""Tests for certbot.errors."""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from acme import messages
from certbot import achallenges
diff --git a/certbot/tests/helpful_test.py b/certbot/tests/helpful_test.py
index 0abe277bf54..c67211a43fa 100644
--- a/certbot/tests/helpful_test.py
+++ b/certbot/tests/helpful_test.py
@@ -1,10 +1,6 @@
"""Tests for certbot.helpful_parser"""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from certbot import errors
from certbot._internal.cli import HelpfulArgumentParser
diff --git a/certbot/tests/hook_test.py b/certbot/tests/hook_test.py
index fad18dc9f8c..8cd8e663157 100644
--- a/certbot/tests/hook_test.py
+++ b/certbot/tests/hook_test.py
@@ -1,10 +1,6 @@
"""Tests for certbot._internal.hooks."""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from certbot import errors
from certbot import util
diff --git a/certbot/tests/lock_test.py b/certbot/tests/lock_test.py
index b45eb8f7a99..1e752578208 100644
--- a/certbot/tests/lock_test.py
+++ b/certbot/tests/lock_test.py
@@ -2,11 +2,7 @@
import functools
import multiprocessing
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from certbot import errors
from certbot.compat import os
diff --git a/certbot/tests/log_test.py b/certbot/tests/log_test.py
index aec3ac65a96..855582591f8 100644
--- a/certbot/tests/log_test.py
+++ b/certbot/tests/log_test.py
@@ -6,6 +6,7 @@
import time
from typing import Optional
import unittest
+from unittest import mock
from acme import messages
from certbot import errors
@@ -15,11 +16,6 @@
from certbot.compat import os
from certbot.tests import util as test_util
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
class PreArgParseSetupTest(unittest.TestCase):
diff --git a/certbot/tests/main_test.py b/certbot/tests/main_test.py
index e26b19357bd..579682bdfbb 100644
--- a/certbot/tests/main_test.py
+++ b/certbot/tests/main_test.py
@@ -12,6 +12,7 @@
import traceback
from typing import List
import unittest
+from unittest import mock
import josepy as jose
import pytz
@@ -34,11 +35,6 @@
from certbot.plugins import enhancements
import certbot.tests.util as test_util
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
CERT_PATH = test_util.vector_path('cert_512.pem')
diff --git a/certbot/tests/ocsp_test.py b/certbot/tests/ocsp_test.py
index c102667bc6b..802787e020d 100644
--- a/certbot/tests/ocsp_test.py
+++ b/certbot/tests/ocsp_test.py
@@ -4,6 +4,7 @@
from datetime import datetime
from datetime import timedelta
import unittest
+from unittest import mock
from cryptography import x509
from cryptography.exceptions import InvalidSignature
@@ -16,11 +17,6 @@
from certbot import errors
from certbot.tests import util as test_util
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
out = """Missing = in header key=value
ocsp: Use -help for summary.
diff --git a/certbot/tests/plugins/common_test.py b/certbot/tests/plugins/common_test.py
index 46d766bcfaf..215faaea3bb 100644
--- a/certbot/tests/plugins/common_test.py
+++ b/certbot/tests/plugins/common_test.py
@@ -2,12 +2,9 @@
import functools
import shutil
import unittest
+from unittest import mock
import josepy as jose
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
from acme import challenges
from certbot import achallenges
diff --git a/certbot/tests/plugins/disco_test.py b/certbot/tests/plugins/disco_test.py
index 6b599f561db..c564cebce06 100644
--- a/certbot/tests/plugins/disco_test.py
+++ b/certbot/tests/plugins/disco_test.py
@@ -3,6 +3,7 @@
import string
from typing import List
import unittest
+from unittest import mock
import pkg_resources
@@ -12,11 +13,6 @@
from certbot._internal.plugins import standalone
from certbot._internal.plugins import webroot
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
EP_SA = pkg_resources.EntryPoint(
"sa", "certbot._internal.plugins.standalone",
diff --git a/certbot/tests/plugins/dns_common_lexicon_test.py b/certbot/tests/plugins/dns_common_lexicon_test.py
index 40afd107bac..4634c205726 100644
--- a/certbot/tests/plugins/dns_common_lexicon_test.py
+++ b/certbot/tests/plugins/dns_common_lexicon_test.py
@@ -1,11 +1,7 @@
"""Tests for certbot.plugins.dns_common_lexicon."""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from certbot.plugins import dns_common_lexicon
from certbot.plugins import dns_test_common_lexicon
diff --git a/certbot/tests/plugins/dns_common_test.py b/certbot/tests/plugins/dns_common_test.py
index f68d36137ad..97bc5dea62a 100644
--- a/certbot/tests/plugins/dns_common_test.py
+++ b/certbot/tests/plugins/dns_common_test.py
@@ -3,11 +3,7 @@
import collections
import logging
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from certbot import errors
from certbot import util
diff --git a/certbot/tests/plugins/enhancements_test.py b/certbot/tests/plugins/enhancements_test.py
index 62289d95bf7..903d3e0950e 100644
--- a/certbot/tests/plugins/enhancements_test.py
+++ b/certbot/tests/plugins/enhancements_test.py
@@ -1,10 +1,6 @@
"""Tests for new style enhancements"""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from certbot._internal.plugins import null
from certbot.plugins import enhancements
diff --git a/certbot/tests/plugins/manual_test.py b/certbot/tests/plugins/manual_test.py
index cfe2f60fa8e..a5dc69c32f3 100644
--- a/certbot/tests/plugins/manual_test.py
+++ b/certbot/tests/plugins/manual_test.py
@@ -2,11 +2,7 @@
import sys
import textwrap
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from acme import challenges
from certbot import errors
diff --git a/certbot/tests/plugins/null_test.py b/certbot/tests/plugins/null_test.py
index dfdd0a7deb7..ce3440e5bc4 100644
--- a/certbot/tests/plugins/null_test.py
+++ b/certbot/tests/plugins/null_test.py
@@ -1,10 +1,6 @@
"""Tests for certbot._internal.plugins.null."""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
class InstallerTest(unittest.TestCase):
diff --git a/certbot/tests/plugins/selection_test.py b/certbot/tests/plugins/selection_test.py
index b2f38754262..6aed9ec8d22 100644
--- a/certbot/tests/plugins/selection_test.py
+++ b/certbot/tests/plugins/selection_test.py
@@ -2,7 +2,7 @@
import sys
from typing import List
import unittest
-
+from unittest import mock
from certbot import errors
from certbot import interfaces
@@ -11,11 +11,6 @@
from certbot.display import util as display_util
from certbot.tests import util as test_util
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
class ConveniencePickPluginTest(unittest.TestCase):
"""Tests for certbot._internal.plugins.selection.pick_*."""
diff --git a/certbot/tests/plugins/standalone_test.py b/certbot/tests/plugins/standalone_test.py
index 2649abae929..39454570e7e 100644
--- a/certbot/tests/plugins/standalone_test.py
+++ b/certbot/tests/plugins/standalone_test.py
@@ -5,6 +5,7 @@
from typing import Set
from typing import Tuple
import unittest
+from unittest import mock
import josepy as jose
import OpenSSL.crypto
@@ -16,11 +17,6 @@
from certbot.tests import acme_util
from certbot.tests import util as test_util
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
class ServerManagerTest(unittest.TestCase):
"""Tests for certbot._internal.plugins.standalone.ServerManager."""
diff --git a/certbot/tests/plugins/storage_test.py b/certbot/tests/plugins/storage_test.py
index 66034b09ec5..a63ef779566 100644
--- a/certbot/tests/plugins/storage_test.py
+++ b/certbot/tests/plugins/storage_test.py
@@ -4,17 +4,13 @@
from typing import List
from typing import Optional
import unittest
+from unittest import mock
from certbot import errors
from certbot.compat import filesystem
from certbot.compat import os
from certbot.tests import util as test_util
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
class PluginStorageTest(test_util.ConfigTestCase):
diff --git a/certbot/tests/plugins/util_test.py b/certbot/tests/plugins/util_test.py
index 1b4fcd6529f..faac0116532 100644
--- a/certbot/tests/plugins/util_test.py
+++ b/certbot/tests/plugins/util_test.py
@@ -1,10 +1,6 @@
"""Tests for certbot.plugins.util."""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from certbot.compat import os
diff --git a/certbot/tests/plugins/webroot_test.py b/certbot/tests/plugins/webroot_test.py
index d7e96159658..d5ccc4b4f7b 100644
--- a/certbot/tests/plugins/webroot_test.py
+++ b/certbot/tests/plugins/webroot_test.py
@@ -8,12 +8,9 @@
import shutil
import tempfile
import unittest
+from unittest import mock
import josepy as jose
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
from acme import challenges
from certbot import achallenges
diff --git a/certbot/tests/renewal_test.py b/certbot/tests/renewal_test.py
index d6e2866dc56..f3968944f6f 100644
--- a/certbot/tests/renewal_test.py
+++ b/certbot/tests/renewal_test.py
@@ -1,18 +1,13 @@
"""Tests for certbot._internal.renewal"""
import copy
import unittest
+from unittest import mock
from acme import challenges
from certbot import errors, configuration
from certbot._internal import storage
import certbot.tests.util as test_util
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
-
class RenewalTest(test_util.ConfigTestCase):
@mock.patch('certbot._internal.cli.set_by_cli')
diff --git a/certbot/tests/renewupdater_test.py b/certbot/tests/renewupdater_test.py
index f086e3cf31f..30a7b0f46e7 100644
--- a/certbot/tests/renewupdater_test.py
+++ b/certbot/tests/renewupdater_test.py
@@ -1,10 +1,6 @@
"""Tests for renewal updater interfaces"""
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from certbot import interfaces
from certbot._internal import main
diff --git a/certbot/tests/reverter_test.py b/certbot/tests/reverter_test.py
index e8d85d4d1cd..5124c7d9f9c 100644
--- a/certbot/tests/reverter_test.py
+++ b/certbot/tests/reverter_test.py
@@ -4,11 +4,7 @@
import shutil
import tempfile
import unittest
-
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
+from unittest import mock
from certbot import errors
from certbot.compat import os
diff --git a/certbot/tests/storage_test.py b/certbot/tests/storage_test.py
index c4e42ec379f..3a1f2b7b421 100644
--- a/certbot/tests/storage_test.py
+++ b/certbot/tests/storage_test.py
@@ -4,12 +4,9 @@
import shutil
import stat
import unittest
+from unittest import mock
import configobj
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
import pytz
import certbot
diff --git a/certbot/tests/util_test.py b/certbot/tests/util_test.py
index 0da0976b8eb..e9b5ddef2b2 100644
--- a/certbot/tests/util_test.py
+++ b/certbot/tests/util_test.py
@@ -5,18 +5,13 @@
import io
import sys
import unittest
+from unittest import mock
from certbot import errors
from certbot.compat import filesystem
from certbot.compat import os
import certbot.tests.util as test_util
-try:
- import mock
-except ImportError: # pragma: no cover
- from unittest import mock
-
-
class EnvNoSnapForExternalCallsTest(unittest.TestCase):
"""Tests for certbot.util.env_no_snap_for_external_calls."""
diff --git a/pytest.ini b/pytest.ini
index 42853f26be1..704912685da 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -11,14 +11,12 @@
# we release breaking changes.
#
# The current warnings being ignored are:
-# 1) The warning raised when importing certbot.tests.util and the external mock
-# library is installed.
-# 2) A deprecation warning is raised in dnspython==1.15.0 in the oldest tests for
+# 1) A deprecation warning is raised in dnspython==1.15.0 in the oldest tests for
# certbot-dns-rfc2136.
-# 3) botocore is currently using deprecated urllib3 functionality. See
-# https://github.com/boto/botocore/issues/2744.
+# 2) botocore's default TLS settings raise deprecation warnings in Python
+# 3.10+, but their values are sane from a security perspective. See
+# https://github.com/boto/botocore/issues/2550.
filterwarnings =
error
- ignore:The external mock module:PendingDeprecationWarning
ignore:decodestring\(\) is a deprecated alias:DeprecationWarning:dns
ignore:'urllib3.contrib.pyopenssl:DeprecationWarning:botocore
diff --git a/tools/oldest_constraints.txt b/tools/oldest_constraints.txt
index bbbbc579ade..2e723bd8151 100644
--- a/tools/oldest_constraints.txt
+++ b/tools/oldest_constraints.txt
@@ -45,7 +45,6 @@ jsonschema==3.2.0; python_version >= "3.7"
lazy-object-proxy==1.7.1; python_version >= "3.7"
logger==1.4; python_version >= "3.7"
mccabe==0.7.0; python_version >= "3.7"
-mock==1.0.1
mypy-extensions==0.4.3; python_version >= "3.7"
mypy==0.971; python_version >= "3.7"
ndg-httpsclient==0.3.2
@@ -92,7 +91,6 @@ tomli==2.0.1; python_version < "3.11" and python_version >= "3.7" or python_full
tox==1.9.2; python_version >= "3.7"
typed-ast==1.5.4; python_version >= "3.7" and python_version < "3.8" or implementation_name == "cpython" and python_version < "3.8" and python_version >= "3.7"
types-cryptography==3.3.21; python_version >= "3.7"
-types-mock==4.0.15; python_version >= "3.7"
types-pyopenssl==22.0.9; python_version >= "3.7"
types-pyrfc3339==1.1.1; python_version >= "3.7"
types-python-dateutil==2.8.19; python_version >= "3.7"
diff --git a/tools/pinning/current/pyproject.toml b/tools/pinning/current/pyproject.toml
index eb42ecd342a..77b4fdf8d07 100644
--- a/tools/pinning/current/pyproject.toml
+++ b/tools/pinning/current/pyproject.toml
@@ -50,13 +50,6 @@ awscli = ">=1.22.76"
# as a dependency here to ensure a version of cython is pinned for extra
# stability.
cython = "*"
-# We install mock in our "external-mock" tox environment to test that we didn't
-# break Certbot's test API which used to always use mock objects from the 3rd
-# party mock library. We list the mock dependency here so that is pinned, but
-# we don't depend on it in Certbot to avoid installing mock when it's not
-# needed. This dependency can be removed here once Certbot's support for the
-# 3rd party mock library has been dropped.
-mock = "*"
# poetry 1.2.0+ is required for it to pin pip, setuptools, and wheel. See
# https://github.com/python-poetry/poetry/issues/1584. This version is required
# here in addition to certbot/setup.py because otherwise the pre-release
diff --git a/tools/pinning/oldest/pyproject.toml b/tools/pinning/oldest/pyproject.toml
index b6aed9dc07e..67b835551ab 100644
--- a/tools/pinning/oldest/pyproject.toml
+++ b/tools/pinning/oldest/pyproject.toml
@@ -61,7 +61,6 @@ google-api-python-client = "1.5.5"
httplib2 = "0.9.2"
idna = "2.6"
ipaddress = "1.0.16"
-mock = "1.0.1"
ndg-httpsclient = "0.3.2"
oauth2client = "4.0.0"
parsedatetime = "2.4"
diff --git a/tools/requirements.txt b/tools/requirements.txt
index e25e7f565b5..36b0f512893 100644
--- a/tools/requirements.txt
+++ b/tools/requirements.txt
@@ -84,7 +84,6 @@ lockfile==0.12.2
markupsafe==2.1.1; python_version >= "3.7"
matplotlib-inline==0.1.3; python_version >= "3.7"
mccabe==0.7.0; python_version >= "3.7"
-mock==4.0.3
msgpack==1.0.4; python_version >= "3.7" and python_version < "4.0"
msrest==0.6.21; python_version >= "3.7"
mypy-extensions==0.4.3; python_version >= "3.7"
@@ -170,7 +169,6 @@ traitlets==5.3.0; python_version >= "3.7"
twine==3.3.0; python_version >= "3.7"
typed-ast==1.5.4; python_version >= "3.7" and python_version < "3.8" or implementation_name == "cpython" and python_version < "3.8" and python_version >= "3.7"
types-cryptography==3.3.21; python_version >= "3.7"
-types-mock==4.0.15; python_version >= "3.7"
types-pyopenssl==22.0.9; python_version >= "3.7"
types-pyrfc3339==1.1.1; python_version >= "3.7"
types-python-dateutil==2.8.19; python_version >= "3.7"
diff --git a/tox.ini b/tox.ini
index f4776328d99..8125304f716 100644
--- a/tox.ini
+++ b/tox.ini
@@ -112,11 +112,6 @@ commands =
setenv =
{[testenv:oldest]setenv}
-[testenv:external-mock]
-commands =
- python {toxinidir}/tools/pip_install.py mock
- {[base]install_and_test} {[base]all_packages}
-
[testenv:lint{,-win,-posix}]
basepython = python3
# separating into multiple invocations disables cross package
|
NVIDIA-Merlin__NVTabular-1139 | [BUG] Problem on writing to_parquet after transforming
```python
#######################################
trasforming Code:
class Processjson(Operator):
def transform(self, columns, gdf):
col = gdf['event_properties']
gdf['item_id'] = col.str.extract('\'product_id\'\s*:\s*\'([^\']+)\'')
gdf['event_time'] = (gdf['event_time'] - pd.Timestamp("1970-01-01")) // pd.Timedelta('1s')
gdf['device_brand'] = gdf['device_brand'].fillna('Apple')
return gdf
def output_column_names(self, columns):
return [i for i in columns if (i != 'event_properties')] + ['item_id']
def dependencies(self):
return None
filtered = COLUMNS >> nvt.ops.Filter(lambda df: df['event_type'].isin(['Value']))
filtered = filtered >> nvt.ops.JoinExternal(df_ext=fid_map,on='user_id',columns_ext=['user_id','memberID'])
filtered = filtered>>Processjson() >> nvt.ops.Dropna()
workflow = nvt.Workflow(filtered)
dataset_file = glob.glob('raw/*')
subdataset_file = dataset_file[6:8]
dataset = nvt.Dataset(subdataset_file, part_size="500MB")
workflow.transform(dataset).to_parquet(f'processed/test')
############################################
```
I follow exam and edited to served my data.
When I set multiple files as input for Dataset class, there is some specific files that I loaded and this exception was thrown.
It might be because there is no handling, where some file have data and some have no data after the filtering.
This doesn't happen if I loaded a single file and process separately.
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-60-2a116bd489a4> in <module>
2 # for i in dataset_file:
3 dataset = nvt.Dataset(subdataset_file, part_size="500MB")
----> 4 workflow.transform(dataset).to_parquet(f'processed/test')
5
/usr/local/lib/python3.8/dist-packages/nvtabular/io/dataset.py in to_parquet(self, output_path, shuffle, preserve_files, output_files, out_files_per_proc, num_threads, dtypes, cats, conts, labels, suffix, partition_on)
763
764 # Output dask_cudf DataFrame to dataset
--> 765 _ddf_to_dataset(
766 ddf,
767 fs,
/usr/local/lib/python3.8/dist-packages/nvtabular/io/dask.py in _ddf_to_dataset(ddf, fs, output_path, shuffle, file_partition_map, out_files_per_proc, cat_names, cont_names, label_names, output_format, client, num_threads, cpu, suffix, partition_on)
364 out = client.compute(out).result()
365 else:
--> 366 out = dask.compute(out, scheduler="synchronous")[0]
367
368 if cached_writers:
/usr/local/lib/python3.8/dist-packages/dask/base.py in compute(*args, **kwargs)
564 postcomputes.append(x.__dask_postcompute__())
565
--> 566 results = schedule(dsk, keys, **kwargs)
567 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
568
/usr/local/lib/python3.8/dist-packages/dask/local.py in get_sync(dsk, keys, **kwargs)
558 """
559 kwargs.pop("num_workers", None) # if num_workers present, remove it
--> 560 return get_async(
561 synchronous_executor.submit,
562 synchronous_executor._max_workers,
/usr/local/lib/python3.8/dist-packages/dask/local.py in get_async(submit, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, chunksize, **kwargs)
501 while state["waiting"] or state["ready"] or state["running"]:
502 fire_tasks(chunksize)
--> 503 for key, res_info, failed in queue_get(queue).result():
504 if failed:
505 exc, tb = loads(res_info)
/usr/lib/python3.8/concurrent/futures/_base.py in result(self, timeout)
430 raise CancelledError()
431 elif self._state == FINISHED:
--> 432 return self.__get_result()
433
434 self._condition.wait(timeout)
/usr/lib/python3.8/concurrent/futures/_base.py in __get_result(self)
386 def __get_result(self):
387 if self._exception:
--> 388 raise self._exception
389 else:
390 return self._result
/usr/local/lib/python3.8/dist-packages/dask/local.py in submit(self, fn, *args, **kwargs)
543 fut = Future()
544 try:
--> 545 fut.set_result(fn(*args, **kwargs))
546 except BaseException as e:
547 fut.set_exception(e)
/usr/local/lib/python3.8/dist-packages/dask/local.py in batch_execute_tasks(it)
235 Batch computing of multiple tasks with `execute_task`
236 """
--> 237 return [execute_task(*a) for a in it]
238
239
/usr/local/lib/python3.8/dist-packages/dask/local.py in <listcomp>(.0)
235 Batch computing of multiple tasks with `execute_task`
236 """
--> 237 return [execute_task(*a) for a in it]
238
239
/usr/local/lib/python3.8/dist-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
226 failed = False
227 except BaseException as e:
--> 228 result = pack_exception(e, dumps)
229 failed = True
230 return key, result, failed
/usr/local/lib/python3.8/dist-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
221 try:
222 task, data = loads(task_info)
--> 223 result = _execute_task(task, data)
224 id = get_id()
225 result = dumps((result, id))
/usr/local/lib/python3.8/dist-packages/dask/core.py in _execute_task(arg, cache, dsk)
119 # temporaries by their reference count and can execute certain
120 # operations in-place.
--> 121 return func(*(_execute_task(a, cache) for a in args))
122 elif not ishashable(arg):
123 return arg
/usr/lib/python3.8/contextlib.py in inner(*args, **kwds)
73 def inner(*args, **kwds):
74 with self._recreate_cm():
---> 75 return func(*args, **kwds)
76 return inner
77
/usr/local/lib/python3.8/dist-packages/nvtabular/io/dask.py in _write_output_partition(df, processed_path, shuffle, out_files_per_proc, fs, cat_names, cont_names, label_names, output_format, num_threads, cpu, suffix)
92
93 # Add data
---> 94 writer.add_data(df)
95
96 return df_size
/usr/lib/python3.8/contextlib.py in inner(*args, **kwds)
73 def inner(*args, **kwds):
74 with self._recreate_cm():
---> 75 return func(*args, **kwds)
76 return inner
77
/usr/local/lib/python3.8/dist-packages/nvtabular/io/writer.py in add_data(self, df)
137 # Only writing to a single file. No need to
138 # scatter or slice the data before writing
--> 139 self._add_single_file(df)
140 else:
141 # Use different mechanism to decompose and write each df
/usr/local/lib/python3.8/dist-packages/nvtabular/io/writer.py in _add_single_file(self, df)
224 self.queue.put((0, df))
225 else:
--> 226 self._write_table(0, df)
227
228 def package_general_metadata(self):
/usr/local/lib/python3.8/dist-packages/nvtabular/io/parquet.py in _write_table(self, idx, data)
788 def _write_table(self, idx, data):
789 writer = self._get_or_create_writer(idx)
--> 790 writer.write_table(data)
791
792 @classmethod
cudf/_lib/parquet.pyx in cudf._lib.parquet.ParquetWriter.write_table()
RuntimeError: cuDF failure at: /workspace/build-env/cpp/src/io/parquet/writer_impl.cu:462: Leaf column's corresponding metadata cannot have children
| [
{
"content": "#\n# Copyright (c) 2021, NVIDIA CORPORATION.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless requ... | [
{
"content": "#\n# Copyright (c) 2021, NVIDIA CORPORATION.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless requ... | diff --git a/nvtabular/workflow/workflow.py b/nvtabular/workflow/workflow.py
index cf433cceffd..87aba3071d6 100644
--- a/nvtabular/workflow/workflow.py
+++ b/nvtabular/workflow/workflow.py
@@ -433,6 +433,7 @@ def _transform_ddf(ddf, workflow_nodes, meta=None, additional_columns=None):
workflow_nodes,
additional_columns=additional_columns,
meta=meta,
+ enforce_metadata=False,
)
diff --git a/tests/unit/test_dask_nvt.py b/tests/unit/test_dask_nvt.py
index c08b7c8e1cc..de35101c31b 100644
--- a/tests/unit/test_dask_nvt.py
+++ b/tests/unit/test_dask_nvt.py
@@ -20,8 +20,10 @@
import cudf
import dask_cudf
+import pandas as pd
import pytest
from dask.dataframe import assert_eq
+from dask.dataframe import from_pandas as dd_from_pandas
from dask.dataframe import read_parquet as dd_read_parquet
from nvtabular import ColumnSelector, Dataset, Workflow, ops
@@ -276,3 +278,18 @@ def test_dask_preproc_cpu(client, tmpdir, datasets, engine, shuffle, cpu):
df_disk.sort_values(["id", "x"])[["name-string", "label"]],
check_index=False,
)
+
+
+@pytest.mark.parametrize("cpu", [None, True])
+def test_filtered_partition(tmpdir, cpu):
+ # Toy DataFrame example
+ df = pd.DataFrame({"col": range(100)})
+ ddf = dd_from_pandas(df, npartitions=5)
+ dataset = Dataset(ddf, cpu=cpu)
+
+ # Workflow
+ filtered = ["col"] >> ops.Filter(lambda df: df["col"] < 75)
+ workflow = Workflow(filtered)
+
+ # Write result to disk
+ workflow.transform(dataset).to_parquet(str(tmpdir))
|
zulip__zulip-20788 | "Pan and zoom" cuts off images instead of using the available space
If you have a tall image and a wide monitor (and wide browser viewport), and you try to zoom… the image stays trapped inside the same box it occupied before you even tried to zoom. If the image is super wide instead of tall, the same thing happens the other way around.
This leads to a lot of frustrating panning around, to look at the different parts of the image through this narrow keyhole, while tons of screen space next to it doesn't get used.
This is the biggest of the issues described by @vanclute in #18939. It was reported again by @alexanderglueck as #19837, and I just ran into it myself ([chat](https://chat.zulip.org/#narrow/stream/6-frontend/topic/pan.2Fzoom/near/1308717)). Here's a nice illustration from #19837:

Instead, when zooming we should use the full space available. This may be bigger than the area the image occupied when it was scaled down to fit completely in the space available, because the available box may have a different aspect ratio from the image.
| [
{
"content": "import os\n\nZULIP_VERSION = \"5.0-dev+git\"\n\n# Add information on number of commits and commit hash to version, if available\nzulip_git_version_file = os.path.join(\n os.path.dirname(os.path.abspath(__file__)), \"zulip-git-version\"\n)\nlines = [ZULIP_VERSION, \"\"]\nif os.path.exists(zulip_... | [
{
"content": "import os\n\nZULIP_VERSION = \"5.0-dev+git\"\n\n# Add information on number of commits and commit hash to version, if available\nzulip_git_version_file = os.path.join(\n os.path.dirname(os.path.abspath(__file__)), \"zulip-git-version\"\n)\nlines = [ZULIP_VERSION, \"\"]\nif os.path.exists(zulip_... | diff --git a/frontend_tests/node_tests/lightbox.js b/frontend_tests/node_tests/lightbox.js
index 5a2b09837b38b..3a132d6dcd0d0 100644
--- a/frontend_tests/node_tests/lightbox.js
+++ b/frontend_tests/node_tests/lightbox.js
@@ -54,8 +54,8 @@ test("pan_and_zoom", ({override_rewire}) => {
};
override_rewire(lightbox, "render_lightbox_list_images", () => {});
-
- lightbox.open(img);
+ const open_image = lightbox.build_open_image_function();
+ open_image(img);
assert.equal(fetched_zid, 1234);
});
@@ -88,6 +88,7 @@ test("youtube", ({override_rewire}) => {
override_rewire(lightbox, "render_lightbox_list_images", () => {});
- lightbox.open(img);
+ const open_image = lightbox.build_open_image_function();
+ open_image(img);
assert.equal($(".image-actions .open").attr("href"), href);
});
diff --git a/package.json b/package.json
index b30b2fd67185b..3b71f14edef90 100644
--- a/package.json
+++ b/package.json
@@ -14,6 +14,7 @@
"@formatjs/intl": "^1.9.7",
"@giphy/js-components": "^5.0.5",
"@giphy/js-fetch-api": "^4.0.1",
+ "@panzoom/panzoom": "^4.4.3",
"@uppy/core": "^1.7.1",
"@uppy/progress-bar": "^1.3.4",
"@uppy/xhr-upload": "^1.4.2",
diff --git a/static/js/lightbox.js b/static/js/lightbox.js
index 84d6db7cb33e3..04496efcc6cbb 100644
--- a/static/js/lightbox.js
+++ b/static/js/lightbox.js
@@ -1,9 +1,9 @@
+import panzoom from "@panzoom/panzoom";
import $ from "jquery";
import render_lightbox_overlay from "../templates/lightbox_overlay.hbs";
import * as blueslip from "./blueslip";
-import {LightboxCanvas} from "./lightbox_canvas";
import * as message_store from "./message_store";
import * as overlays from "./overlays";
import * as people from "./people";
@@ -15,6 +15,81 @@ let is_open = false;
// memoized instead of being looked up multiple times.
const asset_map = new Map();
+export class PanZoomControl {
+ // Class for both initializing and controlling the
+ // the pan/zoom functionality.
+ constructor(container) {
+ this.container = container;
+ this.panzoom = panzoom(this.container, {
+ disablePan: true,
+ disableZoom: true,
+ cursor: "auto",
+ });
+
+ // The following events are necessary to prevent the click event
+ // firing where the user "unclicks" at the end of the drag, which
+ // was causing accidental overlay closes in some situations.
+ this.container.addEventListener("panzoomstart", () => {
+ // Marks this overlay as needing to stay open.
+ $("#lightbox_overlay").data("noclose", true);
+ });
+
+ this.container.addEventListener("panzoomend", () => {
+ // Don't remove the noclose attribute on this overlay until after paint,
+ // otherwise it will be removed too early and close the lightbox
+ // unintentionally.
+ setTimeout(() => {
+ $("#lightbox_overlay").data("noclose", false);
+ }, 0);
+ });
+
+ // keybinds
+ document.addEventListener("keydown", (e) => {
+ if (!overlays.lightbox_open()) {
+ return;
+ }
+ switch (e.key) {
+ case "Z":
+ case "+":
+ this.zoomIn();
+ break;
+ case "z":
+ case "-":
+ this.zoomOut();
+ break;
+ case "v":
+ overlays.close_overlay("lightbox");
+ break;
+ }
+ e.preventDefault();
+ e.stopPropagation();
+ });
+ }
+
+ reset() {
+ this.panzoom.reset();
+ }
+
+ disablePanZoom() {
+ this.container.removeEventListener("wheel", this.panzoom.zoomWithWheel);
+ this.panzoom.setOptions({disableZoom: true, disablePan: true, cursor: "auto"});
+ this.reset();
+ }
+
+ enablePanZoom() {
+ this.panzoom.setOptions({disableZoom: false, disablePan: false, cursor: "move"});
+ this.container.addEventListener("wheel", this.panzoom.zoomWithWheel);
+ }
+
+ zoomIn() {
+ this.panzoom.zoomIn();
+ }
+
+ zoomOut() {
+ this.panzoom.zoomOut();
+ }
+}
+
export function clear_for_testing() {
is_open = false;
asset_map.clear();
@@ -51,21 +126,10 @@ function display_image(payload) {
$(".player-container").hide();
$(".image-actions, .image-description, .download, .lightbox-canvas-trigger").show();
- const lightbox_canvas = $(".lightbox-canvas-trigger").hasClass("enabled");
-
- if (lightbox_canvas === true) {
- const canvas = document.createElement("canvas");
- canvas.dataset.src = payload.source;
-
- $("#lightbox_overlay .image-preview").html(canvas).show();
- const photo = new LightboxCanvas(canvas);
- photo.speed(2.3);
- } else {
- const img = new Image();
- img.src = payload.source;
-
- $("#lightbox_overlay .image-preview").html(img).show();
- }
+ const img_container = $("#lightbox_overlay .image-preview > .zoom-element");
+ const img = new Image();
+ img.src = payload.source;
+ img_container.html(img).show();
$(".image-description .title").text(payload.title || "N/A");
$(".image-description .user").text(payload.user);
@@ -112,55 +176,59 @@ function display_video(payload) {
$(".image-actions .open").attr("href", payload.url);
}
-export function open($image) {
- // if the asset_map already contains the metadata required to display the
- // asset, just recall that metadata.
- let $preview_src = $image.attr("src");
- let payload = asset_map.get($preview_src);
- if (payload === undefined) {
- if ($preview_src.endsWith("&size=full")) {
- // while fetching an image for canvas, `src` attribute supplies
- // full-sized image instead of thumbnail, so we have to replace
- // `size=full` with `size=thumbnail`.
- //
- // TODO: This is a hack to work around the fact that for
- // the lightbox canvas, the `src` is the data-fullsize-src
- // for the image, not the original thumbnail used to open
- // the lightbox. A better fix will be to check a
- // `data-thumbnail-src` attribute that we add to the
- // canvas elements.
- $preview_src = $preview_src.replace(/.{4}$/, "thumbnail");
- payload = asset_map.get($preview_src);
- }
- if (payload === undefined) {
- payload = parse_image_data($image);
- }
+export function build_open_image_function(on_close) {
+ if (on_close === undefined) {
+ on_close = function () {
+ $(".player-container iframe").remove();
+ is_open = false;
+ document.activeElement.blur();
+ };
}
- if (payload.type.match("-video")) {
- display_video(payload);
- } else if (payload.type === "image") {
- display_image(payload);
- }
+ return function ($image) {
+ // if the asset_map already contains the metadata required to display the
+ // asset, just recall that metadata.
+ let $preview_src = $image.attr("src");
+ let payload = asset_map.get($preview_src);
+ if (payload === undefined) {
+ if ($preview_src.endsWith("&size=full")) {
+ // while fetching an image for canvas, `src` attribute supplies
+ // full-sized image instead of thumbnail, so we have to replace
+ // `size=full` with `size=thumbnail`.
+ //
+ // TODO: This is a hack to work around the fact that for
+ // the lightbox canvas, the `src` is the data-fullsize-src
+ // for the image, not the original thumbnail used to open
+ // the lightbox. A better fix will be to check a
+ // `data-thumbnail-src` attribute that we add to the
+ // canvas elements.
+ $preview_src = $preview_src.replace(/.{4}$/, "thumbnail");
+ payload = asset_map.get($preview_src);
+ }
+ if (payload === undefined) {
+ payload = parse_image_data($image);
+ }
+ }
- if (is_open) {
- return;
- }
+ if (payload.type.match("-video")) {
+ display_video(payload);
+ } else if (payload.type === "image") {
+ display_image(payload);
+ }
- function lightbox_close_overlay() {
- $(".player-container iframe").remove();
- is_open = false;
- document.activeElement.blur();
- }
+ if (is_open) {
+ return;
+ }
- overlays.open_overlay({
- name: "lightbox",
- overlay: $("#lightbox_overlay"),
- on_close: lightbox_close_overlay,
- });
+ overlays.open_overlay({
+ name: "lightbox",
+ overlay: $("#lightbox_overlay"),
+ on_close,
+ });
- popovers.hide_all();
- is_open = true;
+ popovers.hide_all();
+ is_open = true;
+ };
}
export function show_from_selected_message() {
@@ -202,7 +270,8 @@ export function show_from_selected_message() {
}
if ($image.length !== 0) {
- open($image);
+ const open_image = build_open_image_function();
+ open_image($image);
}
}
@@ -282,16 +351,32 @@ export function next() {
// this is a block of events that are required for the lightbox to work.
export function initialize() {
+ // Renders the DOM for the lightbox.
const rendered_lightbox_overlay = render_lightbox_overlay();
$("body").append(rendered_lightbox_overlay);
+ // Bind the pan/zoom control the newly created element.
+ const pan_zoom_control = new PanZoomControl(
+ $("#lightbox_overlay .image-preview > .zoom-element")[0],
+ );
+
+ const reset_lightbox_state = function () {
+ $(".player-container iframe").remove();
+ is_open = false;
+ document.activeElement.blur();
+ pan_zoom_control.disablePanZoom();
+ $(".lightbox-canvas-trigger").removeClass("enabled");
+ };
+
+ const open_image = build_open_image_function(reset_lightbox_state);
+
$("#main_div, #compose .preview_content").on("click", ".message_inline_image a", function (e) {
// prevent the link from opening in a new page.
e.preventDefault();
// prevent the message compose dialog from happening.
e.stopPropagation();
const $img = $(this).find("img");
- open($img);
+ open_image($img);
});
$("#lightbox_overlay .download").on("click", function () {
@@ -304,10 +389,11 @@ export function initialize() {
`.message_row img[src='${CSS.escape($(this).attr("data-src"))}']`,
);
- open($original_image);
+ open_image($original_image);
$(".image-list .image.selected").removeClass("selected");
$(this).addClass("selected");
+ pan_zoom_control.reset();
const parentOffset = this.parentNode.clientWidth + this.parentNode.scrollLeft;
// this is the left and right of the image compared to its parent.
@@ -341,18 +427,15 @@ export function initialize() {
});
$("#lightbox_overlay").on("click", ".lightbox-canvas-trigger", function () {
- let $img = $("#lightbox_overlay").find(".image-preview img");
-
- if ($img.length) {
- $(this).addClass("enabled");
- // the `lightbox.open` function will see the enabled class and
- // enable the `LightboxCanvas` class.
- open($img);
- } else {
- $img = $($("#lightbox_overlay").find(".image-preview canvas")[0].image);
+ const $img = $("#lightbox_overlay").find(".image-preview img");
+ open_image($img);
+ if ($(this).hasClass("enabled")) {
+ pan_zoom_control.disablePanZoom();
$(this).removeClass("enabled");
- open($img);
+ } else {
+ pan_zoom_control.enablePanZoom();
+ $(this).addClass("enabled");
}
});
@@ -363,12 +446,14 @@ export function initialize() {
$("#lightbox_overlay .player-container").on("click", function () {
if ($(this).is(".player-container")) {
+ reset_lightbox_state();
overlays.close_active();
}
});
$("#lightbox_overlay").on("click", ".image-info-wrapper, .center", (e) => {
if ($(e.target).is(".image-info-wrapper, .center")) {
+ reset_lightbox_state();
overlays.close_overlay("lightbox");
}
});
diff --git a/static/js/lightbox_canvas.js b/static/js/lightbox_canvas.js
deleted file mode 100644
index 5550d9a0a94f2..0000000000000
--- a/static/js/lightbox_canvas.js
+++ /dev/null
@@ -1,279 +0,0 @@
-import * as blueslip from "./blueslip";
-import * as overlays from "./overlays";
-
-const funcs = {
- setZoom(meta, zoom) {
- // condition to handle zooming event by zoom hotkeys
- if (zoom === "+") {
- zoom = meta.zoom * 1.2;
- } else if (zoom === "-") {
- zoom = meta.zoom / 1.2;
- }
- // make sure the zoom is above 1 and below the maxZoom.
- meta.zoom = Math.min(Math.max(zoom, 1), meta.maxZoom);
- },
-
- // this is a function given a canvas that attaches all of the events
- // required to pan and zoom.
- attachEvents(canvas, context, meta) {
- let mousedown = false;
-
- // wheelEvent.deltaMode is a value that describes what the unit is
- // for the `deltaX`, `deltaY`, and `deltaZ` properties.
- const DELTA_MODE = {
- PIXEL: 0,
- LINE: 1,
- PAGE: 2,
- };
-
- // use the wheel event rather than scroll because this isn't
- // actually an element that can scroll. The wheel event will
- // detect the *gesture* of scrolling over an element, without actually
- // worrying about scrollable content.
- canvas.addEventListener("wheel", (e) => {
- e.preventDefault();
-
- // this is to reverse scrolling directions for the image.
- let delta = meta.direction * e.deltaY;
-
- if (e.deltaMode === DELTA_MODE.LINE) {
- // the vertical height in pixels of an approximate line.
- delta *= 15;
- }
-
- if (e.deltaMode === DELTA_MODE.PAGE) {
- // the vertical height in pixels of an approximate page.
- delta *= 300;
- }
-
- // this is calculated as the user defined speed times the normalizer
- // (which just is what it takes to take the raw delta and transform
- // it to a normal speed), multiply it against the current zoom.
- // Example:
- // delta = 8
- // normalizedDelta = delta * (1 / 20) * 1 = 0.4
- // zoom = zoom * (0.4 / 100) + 1
- const zoom =
- meta.zoom * ((meta.speed * meta.internalSpeedMultiplier * delta) / 100 + 1);
-
- funcs.setZoom(meta, zoom);
- funcs.displayImage(canvas, context, meta);
-
- return false;
- });
-
- // the only valid mousedown events should originate inside of the
- // canvas.
- canvas.addEventListener("mousedown", () => {
- mousedown = true;
- });
-
- // on mousemove, actually run the pan events.
- canvas.addEventListener("mousemove", (e) => {
- // to pan, there must be mousedown and mousemove, check if valid.
- if (mousedown === true) {
- // find the percent of movement relative to the canvas width
- // since e.movementX, e.movementY are in px.
- const percentMovement = {
- x: e.movementX / canvas.width,
- y: e.movementY / canvas.height,
- };
-
- // add the percentMovement to the meta coordinates but divide
- // out by the zoom ratio because when zoomed in 10x for example
- // moving the photo by 1% will appear like 10% on the <canvas>.
- meta.coords.x += (percentMovement.x * 2) / meta.zoom;
- meta.coords.y += (percentMovement.y * 2) / meta.zoom;
-
- // redraw the image.
- funcs.displayImage(canvas, context, meta);
- }
- });
-
- // event listener to handle zoom in and out from using keyboard keys z/Z and +/-
- // in the canvas
- // these hotkeys are not implemented in static/js/hotkey.js as the code in
- // static/js/lightbox_canvas.js and static/js/lightbox.js isn't written a way
- // that the LightboxCanvas instance created in lightbox.js can be
- // accessed from hotkey.js. Major code refactoring is required in lightbox.js
- // to implement these keyboard shortcuts in hotkey.js
- document.addEventListener("keydown", (e) => {
- if (!overlays.lightbox_open()) {
- return;
- }
- switch (e.key) {
- case "Z":
- case "+":
- funcs.setZoom(meta, "+");
- funcs.displayImage(canvas, context, meta);
- break;
- case "z":
- case "-":
- funcs.setZoom(meta, "-");
- funcs.displayImage(canvas, context, meta);
- break;
- case "v":
- overlays.close_overlay("lightbox");
- break;
- }
- e.preventDefault();
- e.stopPropagation();
- });
-
- // make sure that when the mousedown is lifted on <canvas>to prevent
- // panning events.
- canvas.addEventListener("mouseup", () => {
- mousedown = false;
- });
-
- // do so on the document.body as well, though depending on the infra,
- // these are less reliable as preventDefault may prevent these events
- // from propagating all the way to the <body>.
- document.body.addEventListener("mouseup", function body_mouseup() {
- if (document.body.contains(canvas)) {
- mousedown = false;
- } else {
- document.body.removeEventListener("mouseup", body_mouseup);
- }
- });
-
- window.addEventListener("resize", function window_resize() {
- if (document.body.contains(canvas)) {
- funcs.sizeCanvas(canvas, meta);
- funcs.displayImage(canvas, context, meta);
- } else {
- window.removeEventListener("resize", window_resize);
- }
- });
- },
-
- imageRatio(image) {
- return image.naturalWidth / image.naturalHeight;
- },
-
- displayImage(canvas, context, meta) {
- meta.coords.x = Math.max(1 / (meta.zoom * 2), meta.coords.x);
- meta.coords.x = Math.min(1 - 1 / (meta.zoom * 2), meta.coords.x);
-
- meta.coords.y = Math.max(1 / (meta.zoom * 2), meta.coords.y);
- meta.coords.y = Math.min(1 - 1 / (meta.zoom * 2), meta.coords.y);
-
- const c = {
- x: meta.coords.x - 1,
- y: meta.coords.y - 1,
- };
-
- const x = meta.zoom * c.x * canvas.width + canvas.width / 2;
- const y = meta.zoom * c.y * canvas.height + canvas.height / 2;
- const w = canvas.width * meta.zoom;
- const h = canvas.height * meta.zoom;
-
- context.clearRect(0, 0, canvas.width, canvas.height);
- context.imageSmoothingEnabled = false;
-
- context.drawImage(meta.image, x, y, w, h);
- },
-
- // the `sizeCanvas` method figures out the appropriate bounding box for
- // the canvas given a parent that has constraints.
- // for example, if a photo has a ration of 1.5:1 (w:h), and the parent
- // box is 1:1 respectively, we want to stretch the photo to be as large
- // as we can, which means that we check if having the photo width = 100%
- // means that the height is less than 100% of the parent height. If so,
- // then we size the photo as w = 100%, h = 100% / 1.5.
- sizeCanvas(canvas, meta) {
- if (canvas.parentNode === null) {
- return;
- }
-
- if (typeof meta.resize_handler === "function") {
- meta.resize_handler(canvas);
- }
-
- const parent = {
- width: canvas.parentNode.clientWidth,
- height: canvas.parentNode.clientHeight,
- };
-
- if (parent.height * meta.ratio > parent.width) {
- canvas.width = parent.width * 2;
- canvas.style.width = parent.width + "px";
-
- canvas.height = (parent.width / meta.ratio) * 2;
- canvas.style.height = parent.width / meta.ratio + "px";
- } else {
- canvas.height = parent.height * 2;
- canvas.style.height = parent.height + "px";
-
- canvas.width = parent.height * meta.ratio * 2;
- canvas.style.width = parent.height * meta.ratio + "px";
- }
-
- blueslip.warn("Please specify a 'data-width' or 'data-height' argument for canvas.");
- },
-};
-
-export class LightboxCanvas {
- meta = {
- direction: -1,
- zoom: 1,
- image: null,
- coords: {
- x: 0.5,
- y: 0.5,
- },
- speed: 1,
- // this is to normalize the speed to what I would consider to be
- // "standard" zoom speed.
- internalSpeedMultiplier: 0.05,
- maxZoom: 10,
- };
-
- constructor(el) {
- if (el instanceof Node) {
- this.canvas = el;
- } else if (typeof el === "string") {
- this.canvas = document.querySelector(el);
- } else {
- throw new TypeError("'LightboxCanvas' accepts either string selector or node.");
- }
-
- this.context = this.canvas.getContext("2d");
-
- this.meta.image = new Image();
- this.meta.image.src = this.canvas.dataset.src;
- this.meta.image.addEventListener("load", () => {
- this.meta.ratio = funcs.imageRatio(this.meta.image);
-
- funcs.sizeCanvas(this.canvas, this.meta);
- funcs.displayImage(this.canvas, this.context, this.meta);
- });
-
- this.canvas.image = this.meta.image;
-
- funcs.attachEvents(this.canvas, this.context, this.meta);
- }
-
- // set the speed at which scrolling zooms in on a photo.
- speed(speed) {
- this.meta.speed = speed;
- }
-
- // set the max zoom of the `LightboxCanvas` canvas as a mult of the total width.
- maxZoom(maxZoom) {
- this.meta.maxZoom = maxZoom;
- }
-
- reverseScrollDirection() {
- this.meta.direction = 1;
- }
-
- setZoom(zoom) {
- funcs.setZoom(this.meta, zoom);
- funcs.displayImage(this.canvas, this.context, this.meta);
- }
-
- resize(callback) {
- this.meta.resize_handler = callback;
- }
-}
diff --git a/static/js/overlays.js b/static/js/overlays.js
index 1cee48eecc798..3d0674d067eae 100644
--- a/static/js/overlays.js
+++ b/static/js/overlays.js
@@ -337,6 +337,11 @@ export function initialize() {
return;
}
+ if ($target.data("noclose")) {
+ // This overlay has been marked explicitly to not be closed.
+ return;
+ }
+
const target_name = $target.attr("data-overlay");
close_overlay(target_name);
diff --git a/static/styles/lightbox.css b/static/styles/lightbox.css
index 80383d14f933d..937572900d1c7 100644
--- a/static/styles/lightbox.css
+++ b/static/styles/lightbox.css
@@ -36,6 +36,14 @@
canvas {
cursor: pointer;
}
+
+ .zoom-element {
+ width: 100%;
+ height: 100%;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ }
}
.exit {
@@ -141,9 +149,6 @@
}
.image-description {
- width: 100%;
- /* approx width of screen minus action buttons on the side. */
- max-width: calc(100% - 450px);
/* add some extra margin top and remove some bottom to keep the
height the same. and vertically center the text with the buttons. */
margin-top: 25px;
diff --git a/static/templates/lightbox_overlay.hbs b/static/templates/lightbox_overlay.hbs
index 69d54a452de45..20fde407d3f99 100644
--- a/static/templates/lightbox_overlay.hbs
+++ b/static/templates/lightbox_overlay.hbs
@@ -1,4 +1,4 @@
-<div id="lightbox_overlay" class="overlay new-style" data-overlay="lightbox">
+<div id="lightbox_overlay" class="overlay new-style" data-overlay="lightbox" data-noclose="false">
<div class="image-info-wrapper">
<div class="image-description">
<div class="title"></div>
@@ -16,7 +16,9 @@
<div class="clear-float"></div>
</div>
- <div class="image-preview overlay-content no-select"></div>
+ <div class="image-preview no-select">
+ <div class="zoom-element no-select"></div>
+ </div>
<div class="player-container"></div>
<div class="center">
<div class="arrow no-select" data-direction="prev"><</div>
diff --git a/tools/test-js-with-node b/tools/test-js-with-node
index b2260bd1e2b70..52647a7ce12c2 100755
--- a/tools/test-js-with-node
+++ b/tools/test-js-with-node
@@ -93,7 +93,6 @@ EXEMPT_FILES = make_set(
"static/js/info_overlay.js",
"static/js/invite.js",
"static/js/lightbox.js",
- "static/js/lightbox_canvas.js",
"static/js/list_util.ts",
"static/js/loading.ts",
"static/js/local_message.js",
diff --git a/version.py b/version.py
index 71905ba4258ab..7cd446a79cc88 100644
--- a/version.py
+++ b/version.py
@@ -48,4 +48,4 @@
# historical commits sharing the same major version, in which case a
# minor version bump suffices.
-PROVISION_VERSION = "173.3"
+PROVISION_VERSION = "173.4"
diff --git a/yarn.lock b/yarn.lock
index bc2e63651d2d4..5d8188ce790a0 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1377,6 +1377,11 @@
mkdirp "^1.0.4"
rimraf "^3.0.2"
+"@panzoom/panzoom@^4.4.3":
+ version "4.4.3"
+ resolved "https://registry.yarnpkg.com/@panzoom/panzoom/-/panzoom-4.4.3.tgz#439ef0c3eba1cba0ad9b661fda5961aa2e2eec64"
+ integrity sha512-fTAr7/bc9ukvWKxxqdoAuIhKhvu6TwuNiGcA0N3lrSj5OZGlISGLXcSZZyN7kgqH/6icYS7b18UT/Iq/W2rTOA==
+
"@plotly/d3-sankey-circular@0.33.1":
version "0.33.1"
resolved "https://registry.yarnpkg.com/@plotly/d3-sankey-circular/-/d3-sankey-circular-0.33.1.tgz#15d1e0337e0e4b1135bdf0e2195c88adacace1a7"
|
huggingface__diffusers-6507 | StableVideoDiffusionPipeline returns a list instead of np.ndarray for output_type="np"
### Describe the bug
The [comments](https://github.com/huggingface/diffusers/blob/4497b3ec982978eca99895ed1429addde4a84fff/src/diffusers/pipelines/stable_video_diffusion/pipeline_stable_video_diffusion.py#L64) for StableVideoDiffusionPipelineOutput (which is returned by the `__call__()` function of StableVideoDiffusionPipeline) indicate that `np.ndarray` is one of the possible return types for the `frames` field but when I set `output_type="np"` for `__call__()` the `frames` field is a list of `np.ndarray`.
I think the problem is that the output of the `tensor2vid()` call [here](https://github.com/huggingface/diffusers/blob/4497b3ec982978eca99895ed1429addde4a84fff/src/diffusers/pipelines/stable_video_diffusion/pipeline_stable_video_diffusion.py#L534) is a list and not a `np.ndarray`. I have a local commit that uses `np.stack()` to convert the list to a `np.ndarray` that I plan to push up as a PR shortly.
### Reproduction
```
from diffusers import StableVideoDiffusionPipeline
from diffusers.utils import load_image
import torch
import time
repo_id = "stabilityai/stable-video-diffusion-img2vid-xt"
cache_dir = "./cache"
pipeline = StableVideoDiffusionPipeline.from_pretrained(
repo_id, cache_dir=cache_dir, variant="fp16", torch_dtype=torch.float16
)
pipeline.to("cuda")
# Load the conditioning image
image = load_image(
"https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/diffusers/svd/rocket.png"
)
image = [image.resize((1024, 576))]
image *= 2
generator = torch.manual_seed(42)
frames = pipeline(
image,
decode_chunk_size=8,
generator=generator,
output_type="np",
).frames
print(type(frames))
print(frames.shape)
```
### Logs
```shell
Loading pipeline components...: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 5/5 [00:00<00:00, 11.34it/s]
100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 25/25 [01:33<00:00, 3.72s/it]
<class 'list'>
Traceback (most recent call last):
File "/home/user/ai-worker/jobs/containers/svd-xt-film/test_svd.py", line 33, in <module>
print(frames.shape)
^^^^^^^^^^^^
AttributeError: 'list' object has no attribute 'shape'
```
### System Info
- `diffusers` version: 0.25.0
- Platform: Linux-5.4.0-166-generic-x86_64-with-glibc2.31
- Python version: 3.11.7
- PyTorch version (GPU?): 2.1.2+cu121 (True)
- Huggingface_hub version: 0.20.2
- Transformers version: 4.36.2
- Accelerate version: 0.25.0
- xFormers version: 0.0.23.post1
- Using GPU in script?: Nvidia RTX 4090
### Who can help?
@patil-suraj @patrick
| [
{
"content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#... | [
{
"content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#... | diff --git a/src/diffusers/pipelines/stable_video_diffusion/pipeline_stable_video_diffusion.py b/src/diffusers/pipelines/stable_video_diffusion/pipeline_stable_video_diffusion.py
index fa96f41cd81f..a0c3be089ece 100644
--- a/src/diffusers/pipelines/stable_video_diffusion/pipeline_stable_video_diffusion.py
+++ b/src/diffusers/pipelines/stable_video_diffusion/pipeline_stable_video_diffusion.py
@@ -52,6 +52,9 @@ def tensor2vid(video: torch.Tensor, processor, output_type="np"):
outputs.append(batch_output)
+ if output_type == "np":
+ return np.stack(outputs)
+
return outputs
diff --git a/tests/pipelines/stable_video_diffusion/test_stable_video_diffusion.py b/tests/pipelines/stable_video_diffusion/test_stable_video_diffusion.py
index 11978424368f..871266fb9c24 100644
--- a/tests/pipelines/stable_video_diffusion/test_stable_video_diffusion.py
+++ b/tests/pipelines/stable_video_diffusion/test_stable_video_diffusion.py
@@ -185,6 +185,23 @@ def test_inference_batch_single_identical(
def test_inference_batch_consistent(self):
pass
+ def test_np_output_type(self):
+ components = self.get_dummy_components()
+ pipe = self.pipeline_class(**components)
+ for component in pipe.components.values():
+ if hasattr(component, "set_default_attn_processor"):
+ component.set_default_attn_processor()
+
+ pipe.to(torch_device)
+ pipe.set_progress_bar_config(disable=None)
+
+ generator_device = "cpu"
+ inputs = self.get_dummy_inputs(generator_device)
+ inputs["output_type"] = "np"
+ output = pipe(**inputs).frames
+ self.assertTrue(isinstance(output, np.ndarray))
+ self.assertEqual(len(output.shape), 5)
+
def test_dict_tuple_outputs_equivalent(self, expected_max_difference=1e-4):
components = self.get_dummy_components()
pipe = self.pipeline_class(**components)
|
microsoft__ptvsd-806 | listen(0) in create_server() does not allow client to connect to linux server
## Environment data
- PTVSD version: 4.1.1
- OS and version: linux kernel 4.14.46
- Python version (& distribution if applicable, e.g. Anaconda): 2.7.13, 3.5.3
- Using VS Code or Visual Studio: N/A
## Actual behavior
server on linux never accepts connection, i.e. `client, _ = sock.accept()` in `connect()` in socket.py never returns. This is due to the `listen(0)` call in `create_server()`. This was changed from `listen(1)` in 322f6946. Although `listen(0)` does work correctly on mac, it does not on linux.
## Expected behavior
the incoming connection to be accepted
## Steps to reproduce:
run:
```python
ptvsd.enable_attach(address=('0.0.0.0', 9876), redirect_output=True)
ptvsd.wait_for_attach()
```
then from the command line, see that `telnet localhost 9876` hangs instead of connecting. some background history is at https://bugs.python.org/issue8498
| [
{
"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nfrom __future__ import absolute_import\n\nfrom collections import namedtuple\nimport contextlib\nimport errno\nimport socket\ntry:\n from... | [
{
"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nfrom __future__ import absolute_import\n\nfrom collections import namedtuple\nimport contextlib\nimport errno\nimport socket\ntry:\n from... | diff --git a/ptvsd/socket.py b/ptvsd/socket.py
index 49739b292..f5226bb52 100644
--- a/ptvsd/socket.py
+++ b/ptvsd/socket.py
@@ -75,7 +75,7 @@ def create_server(host, port):
host = 'localhost'
server = _new_sock()
server.bind((host, port))
- server.listen(0)
+ server.listen(1)
return server
|
jazzband__pip-tools-1871 | Convert the README from rst to md
<!--- Describe the changes here. --->
This PR converts the documentation from README.rst to README.md
Related: https://github.com/jazzband/pip-tools/issues/1856
##### Contributor checklist
- [ ] Provided the tests for the changes.
- [x] Assure PR title is short, clear, and good to be included in the user-oriented changelog
##### Maintainer checklist
- [ ] Assure one of these labels is present: `backwards incompatible`, `feature`, `enhancement`, `deprecation`, `bug`, `dependency`, `docs` or `skip-changelog` as they determine changelog listing.
- [ ] Assign the PR to an existing or new milestone for the target version (following [Semantic Versioning](https://blog.versioneye.com/2014/01/16/semantic-versioning/)).
| [
{
"content": "# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\"\"\"Configuration file for the Sphinx documentation builder.\"\"\"\n\nfrom __future__ import annotations\n\nfrom functools import partial\nfrom pathlib import Path\n\nfrom setuptools_scm import get_version\n\n# -- Path setup ------... | [
{
"content": "# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\"\"\"Configuration file for the Sphinx documentation builder.\"\"\"\n\nfrom __future__ import annotations\n\nfrom functools import partial\nfrom pathlib import Path\n\nfrom setuptools_scm import get_version\n\n# -- Path setup ------... | diff --git a/README.md b/README.md
new file mode 100644
index 000000000..c6267daae
--- /dev/null
+++ b/README.md
@@ -0,0 +1,583 @@
+[![jazzband-image]][jazzband]
+[![pypi][pypi-image]][pypi]
+[![pyversions][pyversions-image]][pyversions]
+[![pre-commit][pre-commit-image]][pre-commit]
+[![buildstatus-gha][buildstatus-gha-image]][buildstatus-gha]
+[![codecov][codecov-image]][codecov]
+
+# pip-tools = pip-compile + pip-sync
+
+A set of command line tools to help you keep your `pip`-based packages fresh,
+even when you've pinned them. You do pin them, right? (In building your Python application and its dependencies for production, you want to make sure that your builds are predictable and deterministic.)
+
+[![pip-tools overview for phase II][pip-tools-overview]][pip-tools-overview]
+
+## Installation
+
+Similar to `pip`, `pip-tools` must be installed in each of your project's
+[virtual environments](https://packaging.python.org/tutorials/installing-packages/#creating-virtual-environments):
+
+```console
+$ source /path/to/venv/bin/activate
+(venv) $ python -m pip install pip-tools
+```
+
+**Note**: all of the remaining example commands assume you've activated your
+project's virtual environment.
+
+## Example usage for `pip-compile`
+
+The `pip-compile` command lets you compile a `requirements.txt` file from
+your dependencies, specified in either `pyproject.toml`, `setup.cfg`,
+`setup.py`, or `requirements.in`.
+
+Run it with `pip-compile` or `python -m piptools compile`. If you use
+multiple Python versions, you can also run `py -X.Y -m piptools compile` on
+Windows and `pythonX.Y -m piptools compile` on other systems.
+
+`pip-compile` should be run from the same virtual environment as your
+project so conditional dependencies that require a specific Python version,
+or other environment markers, resolve relative to your project's
+environment.
+
+**Note**: If `pip-compile` finds an existing `requirements.txt` file that
+fulfils the dependencies then no changes will be made, even if updates are
+available. To compile from scratch, first delete the existing
+`requirements.txt` file, or see
+[Updating requirements](#updating-requirements)
+for alternative approaches.
+
+### Requirements from `pyproject.toml`
+
+The `pyproject.toml` file is the
+[latest standard](https://peps.python.org/pep-0621/) for configuring
+packages and applications, and is recommended for new projects. `pip-compile`
+supports both installing your `project.dependencies` as well as your
+`project.optional-dependencies`. Thanks to the fact that this is an
+official standard, you can use `pip-compile` to pin the dependencies
+in projects that use modern standards-adhering packaging tools like
+[Setuptools](https://setuptools.pypa.io), [Hatch](https://hatch.pypa.io/)
+or [flit](https://flit.pypa.io/).
+
+Suppose you have a 'foobar' Python application that is packaged using `Setuptools`,
+and you want to pin it for production. You can declare the project metadata as:
+
+```toml
+[build-system]
+requires = ["setuptools", "setuptools-scm"]
+build-backend = "setuptools.build_meta"
+
+[project]
+requires-python = ">=3.9"
+name = "foobar"
+dynamic = ["dependencies", "optional-dependencies"]
+
+[tool.setuptools.dynamic]
+dependencies = { file = ["requirements.in"] }
+optional-dependencies.test = { file = ["requirements-test.txt"] }
+
+```
+
+If you have a Django application that is packaged using `Hatch`, and you
+want to pin it for production. You also want to pin your development tools
+in a separate pin file. You declare `django` as a dependency and create an
+optional dependency `dev` that includes `pytest`:
+
+```toml
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "my-cool-django-app"
+version = "42"
+dependencies = ["django"]
+
+[project.optional-dependencies]
+dev = ["pytest"]
+
+```
+
+You can produce your pin files as easily as:
+
+```console
+$ pip-compile -o requirements.txt pyproject.toml
+#
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
+#
+# pip-compile --output-file=requirements.txt pyproject.toml
+#
+asgiref==3.6.0
+ # via django
+django==4.1.7
+ # via my-cool-django-app (pyproject.toml)
+sqlparse==0.4.3
+ # via django
+
+$ pip-compile --extra dev -o dev-requirements.txt pyproject.toml
+#
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
+#
+# pip-compile --extra=dev --output-file=dev-requirements.txt pyproject.toml
+#
+asgiref==3.6.0
+ # via django
+attrs==22.2.0
+ # via pytest
+django==4.1.7
+ # via my-cool-django-app (pyproject.toml)
+exceptiongroup==1.1.1
+ # via pytest
+iniconfig==2.0.0
+ # via pytest
+packaging==23.0
+ # via pytest
+pluggy==1.0.0
+ # via pytest
+pytest==7.2.2
+ # via my-cool-django-app (pyproject.toml)
+sqlparse==0.4.3
+ # via django
+tomli==2.0.1
+ # via pytest
+```
+
+This is great for both pinning your applications, but also to keep the CI
+of your open-source Python package stable.
+
+### Requirements from `setup.py` and `setup.cfg`
+
+`pip-compile` has also full support for `setup.py`- and
+`setup.cfg`-based projects that use `setuptools`.
+
+Just define your dependencies and extras as usual and run
+`pip-compile` as above.
+
+### Requirements from `requirements.in`
+
+You can also use plain text files for your requirements (e.g. if you don't
+want your application to be a package). To use a `requirements.in` file to
+declare the Django dependency:
+
+```
+# requirements.in
+django
+```
+
+Now, run `pip-compile requirements.in`:
+
+```console
+$ pip-compile requirements.in
+#
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
+#
+# pip-compile requirements.in
+#
+asgiref==3.6.0
+ # via django
+django==4.1.7
+ # via -r requirements.in
+sqlparse==0.4.3
+ # via django
+```
+
+And it will produce your `requirements.txt`, with all the Django dependencies
+(and all underlying dependencies) pinned.
+
+(updating-requirements)=
+
+### Updating requirements
+
+`pip-compile` generates a `requirements.txt` file using the latest versions
+that fulfil the dependencies you specify in the supported files.
+
+If `pip-compile` finds an existing `requirements.txt` file that fulfils the
+dependencies then no changes will be made, even if updates are available.
+
+To force `pip-compile` to update all packages in an existing
+`requirements.txt`, run `pip-compile --upgrade`.
+
+To update a specific package to the latest or a specific version use the
+`--upgrade-package` or `-P` flag:
+
+```console
+# only update the django package
+$ pip-compile --upgrade-package django
+
+# update both the django and requests packages
+$ pip-compile --upgrade-package django --upgrade-package requests
+
+# update the django package to the latest, and requests to v2.0.0
+$ pip-compile --upgrade-package django --upgrade-package requests==2.0.0
+```
+
+You can combine `--upgrade` and `--upgrade-package` in one command, to
+provide constraints on the allowed upgrades. For example to upgrade all
+packages whilst constraining requests to the latest version less than 3.0:
+
+```console
+$ pip-compile --upgrade --upgrade-package 'requests<3.0'
+```
+
+### Using hashes
+
+If you would like to use _Hash-Checking Mode_ available in `pip` since
+version 8.0, `pip-compile` offers `--generate-hashes` flag:
+
+```console
+$ pip-compile --generate-hashes requirements.in
+#
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
+#
+# pip-compile --generate-hashes requirements.in
+#
+asgiref==3.6.0 \
+ --hash=sha256:71e68008da809b957b7ee4b43dbccff33d1b23519fb8344e33f049897077afac \
+ --hash=sha256:9567dfe7bd8d3c8c892227827c41cce860b368104c3431da67a0c5a65a949506
+ # via django
+django==4.1.7 \
+ --hash=sha256:44f714b81c5f190d9d2ddad01a532fe502fa01c4cb8faf1d081f4264ed15dcd8 \
+ --hash=sha256:f2f431e75adc40039ace496ad3b9f17227022e8b11566f4b363da44c7e44761e
+ # via -r requirements.in
+sqlparse==0.4.3 \
+ --hash=sha256:0323c0ec29cd52bceabc1b4d9d579e311f3e4961b98d174201d5622a23b85e34 \
+ --hash=sha256:69ca804846bb114d2ec380e4360a8a340db83f0ccf3afceeb1404df028f57268
+ # via django
+```
+
+### Output File
+
+To output the pinned requirements in a filename other than
+`requirements.txt`, use `--output-file`. This might be useful for compiling
+multiple files, for example with different constraints on django to test a
+library with both versions using [tox](https://tox.readthedocs.io/en/latest/):
+
+```console
+$ pip-compile --upgrade-package 'django<1.0' --output-file requirements-django0x.txt
+$ pip-compile --upgrade-package 'django<2.0' --output-file requirements-django1x.txt
+```
+
+Or to output to standard output, use `--output-file=-`:
+
+```console
+$ pip-compile --output-file=- > requirements.txt
+$ pip-compile - --output-file=- < requirements.in > requirements.txt
+```
+
+### Forwarding options to `pip`
+
+Any valid `pip` flags or arguments may be passed on with `pip-compile`'s
+`--pip-args` option, e.g.
+
+```console
+$ pip-compile requirements.in --pip-args "--retries 10 --timeout 30"
+```
+
+### Configuration
+
+You might be wrapping the `pip-compile` command in another script. To avoid
+confusing consumers of your custom script you can override the update command
+generated at the top of requirements files by setting the
+`CUSTOM_COMPILE_COMMAND` environment variable.
+
+```console
+$ CUSTOM_COMPILE_COMMAND="./pipcompilewrapper" pip-compile requirements.in
+#
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
+#
+# ./pipcompilewrapper
+#
+asgiref==3.6.0
+ # via django
+django==4.1.7
+ # via -r requirements.in
+sqlparse==0.4.3
+ # via django
+```
+
+### Workflow for layered requirements
+
+If you have different environments that you need to install different but
+compatible packages for, then you can create layered requirements files and use
+one layer to constrain the other.
+
+For example, if you have a Django project where you want the newest `2.1`
+release in production and when developing you want to use the Django debug
+toolbar, then you can create two `*.in` files, one for each layer:
+
+```
+# requirements.in
+django<2.2
+```
+
+At the top of the development requirements `dev-requirements.in` you use `-c
+requirements.txt` to constrain the dev requirements to packages already
+selected for production in `requirements.txt`.
+
+```
+# dev-requirements.in
+-c requirements.txt
+django-debug-toolbar<2.2
+```
+
+First, compile `requirements.txt` as usual:
+
+```
+$ pip-compile
+#
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
+#
+# pip-compile
+#
+django==2.1.15
+ # via -r requirements.in
+pytz==2023.3
+ # via django
+```
+
+Now compile the dev requirements and the `requirements.txt` file is used as
+a constraint:
+
+```console
+$ pip-compile dev-requirements.in
+#
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
+#
+# pip-compile dev-requirements.in
+#
+django==2.1.15
+ # via
+ # -c requirements.txt
+ # django-debug-toolbar
+django-debug-toolbar==2.1
+ # via -r dev-requirements.in
+pytz==2023.3
+ # via
+ # -c requirements.txt
+ # django
+sqlparse==0.4.3
+ # via django-debug-toolbar
+```
+
+As you can see above, even though a `2.2` release of Django is available, the
+dev requirements only include a `2.1` version of Django because they were
+constrained. Now both compiled requirements files can be installed safely in
+the dev environment.
+
+To install requirements in production stage use:
+
+```console
+$ pip-sync
+```
+
+You can install requirements in development stage by:
+
+```console
+$ pip-sync requirements.txt dev-requirements.txt
+```
+
+### Version control integration
+
+You might use `pip-compile` as a hook for the [pre-commit](https://github.com/pre-commit/pre-commit).
+See [pre-commit docs](https://pre-commit.com/) for instructions.
+Sample `.pre-commit-config.yaml`:
+
+```yaml
+repos:
+ - repo: https://github.com/jazzband/pip-tools
+ rev: 6.13.0
+ hooks:
+ - id: pip-compile
+```
+
+You might want to customize `pip-compile` args by configuring `args` and/or `files`, for example:
+
+```yaml
+repos:
+ - repo: https://github.com/jazzband/pip-tools
+ rev: 6.13.0
+ hooks:
+ - id: pip-compile
+ files: ^requirements/production\.(in|txt)$
+ args: [--index-url=https://example.com, requirements/production.in]
+```
+
+If you have multiple requirement files make sure you create a hook for each file.
+
+```yaml
+repos:
+ - repo: https://github.com/jazzband/pip-tools
+ rev: 6.13.0
+ hooks:
+ - id: pip-compile
+ name: pip-compile setup.py
+ files: ^(setup\.py|requirements\.txt)$
+ - id: pip-compile
+ name: pip-compile requirements-dev.in
+ args: [requirements-dev.in]
+ files: ^requirements-dev\.(in|txt)$
+ - id: pip-compile
+ name: pip-compile requirements-lint.in
+ args: [requirements-lint.in]
+ files: ^requirements-lint\.(in|txt)$
+ - id: pip-compile
+ name: pip-compile requirements.in
+ args: [requirements.in]
+ files: ^requirements\.(in|txt)$
+```
+
+### Example usage for `pip-sync`
+
+Now that you have a `requirements.txt`, you can use `pip-sync` to update
+your virtual environment to reflect exactly what's in there. This will
+install/upgrade/uninstall everything necessary to match the
+`requirements.txt` contents.
+
+Run it with `pip-sync` or `python -m piptools sync`. If you use multiple
+Python versions, you can also run `py -X.Y -m piptools sync` on Windows and
+`pythonX.Y -m piptools sync` on other systems.
+
+`pip-sync` must be installed into and run from the same virtual
+environment as your project to identify which packages to install
+or upgrade.
+
+**Be careful**: `pip-sync` is meant to be used only with a
+`requirements.txt` generated by `pip-compile`.
+
+```console
+$ pip-sync
+Uninstalling flake8-2.4.1:
+ Successfully uninstalled flake8-2.4.1
+Collecting click==4.1
+ Downloading click-4.1-py2.py3-none-any.whl (62kB)
+ 100% |................................| 65kB 1.8MB/s
+ Found existing installation: click 4.0
+ Uninstalling click-4.0:
+ Successfully uninstalled click-4.0
+Successfully installed click-4.1
+```
+
+To sync multiple `*.txt` dependency lists, just pass them in via command
+line arguments, e.g.
+
+```console
+$ pip-sync dev-requirements.txt requirements.txt
+```
+
+If no arguments are passed in, `pip-sync` defaults to `requirements.txt`.
+
+Any valid `pip install` flags or arguments may be passed with `pip-sync`'s
+`--pip-args` option, e.g.
+
+```console
+$ pip-sync requirements.txt --pip-args "--no-cache-dir --no-deps"
+```
+
+**Note**: `pip-sync` will not upgrade or uninstall packaging tools like
+`setuptools`, `pip`, or `pip-tools` itself. Use `python -m pip install --upgrade`
+to upgrade those packages.
+
+### Should I commit `requirements.in` and `requirements.txt` to source control?
+
+Generally, yes. If you want a reproducible environment installation available from your source control,
+then yes, you should commit both `requirements.in` and `requirements.txt` to source control.
+
+Note that if you are deploying on multiple Python environments (read the section below),
+then you must commit a separate output file for each Python environment.
+We suggest using the `{env}-requirements.txt` format
+(ex: `win32-py3.7-requirements.txt`, `macos-py3.10-requirements.txt`, etc.).
+
+### Cross-environment usage of `requirements.in`/`requirements.txt` and `pip-compile`
+
+The dependencies of a package can change depending on the Python environment in which it
+is installed. Here, we define a Python environment as the combination of Operating
+System, Python version (3.7, 3.8, etc.), and Python implementation (CPython, PyPy,
+etc.). For an exact definition, refer to the possible combinations of [PEP 508
+environment markers][environment-markers].
+
+As the resulting `requirements.txt` can differ for each environment, users must
+execute `pip-compile` **on each Python environment separately** to generate a
+`requirements.txt` valid for each said environment. The same `requirements.in` can
+be used as the source file for all environments, using
+[PEP 508 environment markers][environment-markers] as
+needed, the same way it would be done for regular `pip` cross-environment usage.
+
+If the generated `requirements.txt` remains exactly the same for all Python
+environments, then it can be used across Python environments safely. **But** users
+should be careful as any package update can introduce environment-dependent
+dependencies, making any newly generated `requirements.txt` environment-dependent too.
+As a general rule, it's advised that users should still always execute `pip-compile`
+on each targeted Python environment to avoid issues.
+
+### Other useful tools
+
+- [pipdeptree](https://github.com/tox-dev/pipdeptree) to print the dependency tree of the installed packages.
+- `requirements.in`/`requirements.txt` syntax highlighting:
+
+ - [requirements.txt.vim](https://github.com/raimon49/requirements.txt.vim) for Vim.
+ - [Python extension for VS Code](https://marketplace.visualstudio.com/items?itemName=ms-python.python) for VS Code.
+ - [pip-requirements.el](https://github.com/Wilfred/pip-requirements.el) for Emacs.
+
+### Deprecations
+
+This section lists `pip-tools` features that are currently deprecated.
+
+- In future versions, the `--allow-unsafe` behavior will be enabled by
+ default. Use `--no-allow-unsafe` to keep the old behavior. It is
+ recommended to pass the `--allow-unsafe` now to adapt to the upcoming
+ change.
+- Legacy resolver is deprecated and will be removed in future versions.
+ Use `--resolver=backtracking` instead.
+
+### A Note on Resolvers
+
+You can choose from either the legacy or the backtracking resolver.
+The backtracking resolver is recommended, and will become the default
+with the 7.0 release.
+
+Use it now with the `--resolver=backtracking` option to `pip-compile`.
+
+The legacy resolver will occasionally fail to resolve dependencies. The
+backtracking resolver is more robust, but can take longer to run in
+general.
+
+You can continue using the legacy resolver with `--resolver=legacy`.
+
+### Versions and compatibility
+
+The table below summarizes the latest `pip-tools` versions with the required
+`pip` and Python versions. Generally, `pip-tools` supports the same Python
+versions as the required `pip` versions.
+
+| pip-tools | pip | Python |
+| -------------- | -------------- | -------------- |
+| 4.5.\* | 8.1.3 - 20.0.2 | 2.7, 3.5 - 3.8 |
+| 5.0.0 - 5.3.0 | 20.0 - 20.1.1 | 2.7, 3.5 - 3.8 |
+| 5.4.0 | 20.1 - 20.3.\* | 2.7, 3.5 - 3.8 |
+| 5.5.0 | 20.1 - 20.3.\* | 2.7, 3.5 - 3.9 |
+| 6.0.0 - 6.3.1 | 20.3 - 21.2.\* | 3.6 - 3.9 |
+| 6.4.0 | 21.2 - 21.3.\* | 3.6 - 3.10 |
+| 6.5.0 - 6.10.0 | 21.2 - 22.3.\* | 3.7 - 3.11 |
+| 6.11.0+ | 22.2+ | 3.7 - 3.11 |
+
+[jazzband]: https://jazzband.co/
+[jazzband-image]: https://jazzband.co/static/img/badge.svg
+[pypi]: https://pypi.org/project/pip-tools/
+[pypi-image]: https://img.shields.io/pypi/v/pip-tools.svg
+[pyversions]: https://pypi.org/project/pip-tools/
+[pyversions-image]: https://img.shields.io/pypi/pyversions/pip-tools.svg
+[pre-commit]: https://results.pre-commit.ci/latest/github/jazzband/pip-tools/main
+[pre-commit-image]: https://results.pre-commit.ci/badge/github/jazzband/pip-tools/main.svg
+[buildstatus-gha]: https://github.com/jazzband/pip-tools/actions?query=workflow%3ACI
+[buildstatus-gha-image]: https://github.com/jazzband/pip-tools/workflows/CI/badge.svg
+[codecov]: https://codecov.io/gh/jazzband/pip-tools
+[codecov-image]: https://codecov.io/gh/jazzband/pip-tools/branch/main/graph/badge.svg
+[pip-tools-overview]: https://github.com/jazzband/pip-tools/raw/main/img/pip-tools-overview.svg
+[environment-markers]: https://peps.python.org/pep-0508/#environment-markers
diff --git a/README.rst b/README.rst
deleted file mode 100644
index d88b75e0b..000000000
--- a/README.rst
+++ /dev/null
@@ -1,624 +0,0 @@
-|jazzband| |pypi| |pyversions| |pre-commit| |buildstatus-gha| |codecov|
-
-==================================
-pip-tools = pip-compile + pip-sync
-==================================
-
-A set of command line tools to help you keep your ``pip``-based packages fresh,
-even when you've pinned them. You do pin them, right? (In building your Python application and its dependencies for production, you want to make sure that your builds are predictable and deterministic.)
-
-.. image:: https://github.com/jazzband/pip-tools/raw/main/img/pip-tools-overview.svg
- :alt: pip-tools overview for phase II
-
-.. |buildstatus-gha| image:: https://github.com/jazzband/pip-tools/workflows/CI/badge.svg
- :alt: GitHub Actions build status
- :target: https://github.com/jazzband/pip-tools/actions?query=workflow%3ACI
-.. |codecov| image:: https://codecov.io/gh/jazzband/pip-tools/branch/main/graph/badge.svg
- :alt: Coverage
- :target: https://codecov.io/gh/jazzband/pip-tools
-.. |jazzband| image:: https://jazzband.co/static/img/badge.svg
- :alt: Jazzband
- :target: https://jazzband.co/
-.. |pre-commit| image:: https://results.pre-commit.ci/badge/github/jazzband/pip-tools/main.svg
- :alt: pre-commit.ci status
- :target: https://results.pre-commit.ci/latest/github/jazzband/pip-tools/main
-.. |pypi| image:: https://img.shields.io/pypi/v/pip-tools.svg
- :alt: PyPI version
- :target: https://pypi.org/project/pip-tools/
-.. |pyversions| image:: https://img.shields.io/pypi/pyversions/pip-tools.svg
- :alt: Supported Python versions
- :target: https://pypi.org/project/pip-tools/
-.. _You do pin them, right?: https://nvie.com/posts/pin-your-packages/
-
-Installation
-============
-
-Similar to ``pip``, ``pip-tools`` must be installed in each of your project's
-`virtual environments`_:
-
-.. code-block:: bash
-
- $ source /path/to/venv/bin/activate
- (venv) $ python -m pip install pip-tools
-
-**Note**: all of the remaining example commands assume you've activated your
-project's virtual environment.
-
-.. _virtual environments: https://packaging.python.org/tutorials/installing-packages/#creating-virtual-environments
-
-Example usage for ``pip-compile``
-=================================
-
-The ``pip-compile`` command lets you compile a ``requirements.txt`` file from
-your dependencies, specified in either ``pyproject.toml``, ``setup.cfg``,
-``setup.py``, or ``requirements.in``.
-
-Run it with ``pip-compile`` or ``python -m piptools compile``. If you use
-multiple Python versions, you can also run ``py -X.Y -m piptools compile`` on
-Windows and ``pythonX.Y -m piptools compile`` on other systems.
-
-``pip-compile`` should be run from the same virtual environment as your
-project so conditional dependencies that require a specific Python version,
-or other environment markers, resolve relative to your project's
-environment.
-
-**Note**: If ``pip-compile`` finds an existing ``requirements.txt`` file that
-fulfils the dependencies then no changes will be made, even if updates are
-available. To compile from scratch, first delete the existing
-``requirements.txt`` file, or see `Updating requirements`_ for alternative
-approaches.
-
-Requirements from ``pyproject.toml``
-------------------------------------
-
-The ``pyproject.toml`` file is the
-`latest standard <https://peps.python.org/pep-0621/>`_ for configuring
-packages and applications, and is recommended for new projects. ``pip-compile``
-supports both installing your ``project.dependencies`` as well as your
-``project.optional-dependencies``. Thanks to the fact that this is an
-official standard, you can use ``pip-compile`` to pin the dependencies
-in projects that use modern standards-adhering packaging tools like
-`Setuptools <https://setuptools.pypa.io>`_ , `Hatch <https://hatch.pypa.io/>`_
-or `flit <https://flit.pypa.io/>`_.
-
-Suppose you have a 'foobar' Python application that is packaged using ``Setuptools``,
-and you want to pin it for production. You can declare the project metadata as:
-
-.. code-block:: toml
-
- [build-system]
- requires = ["setuptools", "setuptools-scm"]
- build-backend = "setuptools.build_meta"
-
- [project]
- requires-python = ">=3.9"
- name = "foobar"
- dynamic = ["dependencies", "optional-dependencies"]
-
- [tool.setuptools.dynamic]
- dependencies = { file = ["requirements.in"] }
- optional-dependencies.test = { file = ["requirements-test.txt"] }
-
-If you have a Django application that is packaged using ``Hatch``, and you
-want to pin it for production. You also want to pin your development tools
-in a separate pin file. You declare ``django`` as a dependency and create an
-optional dependency ``dev`` that includes ``pytest``:
-
-.. code-block:: toml
-
- [build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
-
- [project]
- name = "my-cool-django-app"
- version = "42"
- dependencies = ["django"]
-
- [project.optional-dependencies]
- dev = ["pytest"]
-
-You can produce your pin files as easily as:
-
-.. code-block:: console
-
- $ pip-compile -o requirements.txt pyproject.toml
- #
- # This file is autogenerated by pip-compile with Python 3.10
- # by the following command:
- #
- # pip-compile --output-file=requirements.txt pyproject.toml
- #
- asgiref==3.6.0
- # via django
- django==4.1.7
- # via my-cool-django-app (pyproject.toml)
- sqlparse==0.4.3
- # via django
-
- $ pip-compile --extra dev -o dev-requirements.txt pyproject.toml
- #
- # This file is autogenerated by pip-compile with Python 3.10
- # by the following command:
- #
- # pip-compile --extra=dev --output-file=dev-requirements.txt pyproject.toml
- #
- asgiref==3.6.0
- # via django
- attrs==22.2.0
- # via pytest
- django==4.1.7
- # via my-cool-django-app (pyproject.toml)
- exceptiongroup==1.1.1
- # via pytest
- iniconfig==2.0.0
- # via pytest
- packaging==23.0
- # via pytest
- pluggy==1.0.0
- # via pytest
- pytest==7.2.2
- # via my-cool-django-app (pyproject.toml)
- sqlparse==0.4.3
- # via django
- tomli==2.0.1
- # via pytest
-
-This is great for both pinning your applications, but also to keep the CI
-of your open-source Python package stable.
-
-Requirements from ``setup.py`` and ``setup.cfg``
-------------------------------------------------
-
-``pip-compile`` has also full support for ``setup.py``- and
-``setup.cfg``-based projects that use ``setuptools``.
-
-Just define your dependencies and extras as usual and run
-``pip-compile`` as above.
-
-Requirements from ``requirements.in``
--------------------------------------
-
-You can also use plain text files for your requirements (e.g. if you don't
-want your application to be a package). To use a ``requirements.in`` file to
-declare the Django dependency:
-
-.. code-block:: ini
-
- # requirements.in
- django
-
-Now, run ``pip-compile requirements.in``:
-
-.. code-block:: bash
-
- $ pip-compile requirements.in
- #
- # This file is autogenerated by pip-compile with Python 3.10
- # by the following command:
- #
- # pip-compile requirements.in
- #
- asgiref==3.6.0
- # via django
- django==4.1.7
- # via -r requirements.in
- sqlparse==0.4.3
- # via django
-
-And it will produce your ``requirements.txt``, with all the Django dependencies
-(and all underlying dependencies) pinned.
-
-.. _Updating requirements:
-
-Updating requirements
----------------------
-
-``pip-compile`` generates a ``requirements.txt`` file using the latest versions
-that fulfil the dependencies you specify in the supported files.
-
-If ``pip-compile`` finds an existing ``requirements.txt`` file that fulfils the
-dependencies then no changes will be made, even if updates are available.
-
-To force ``pip-compile`` to update all packages in an existing
-``requirements.txt``, run ``pip-compile --upgrade``.
-
-To update a specific package to the latest or a specific version use the
-``--upgrade-package`` or ``-P`` flag:
-
-.. code-block:: bash
-
- # only update the django package
- $ pip-compile --upgrade-package django
-
- # update both the django and requests packages
- $ pip-compile --upgrade-package django --upgrade-package requests
-
- # update the django package to the latest, and requests to v2.0.0
- $ pip-compile --upgrade-package django --upgrade-package requests==2.0.0
-
-You can combine ``--upgrade`` and ``--upgrade-package`` in one command, to
-provide constraints on the allowed upgrades. For example to upgrade all
-packages whilst constraining requests to the latest version less than 3.0:
-
-.. code-block:: bash
-
- $ pip-compile --upgrade --upgrade-package 'requests<3.0'
-
-Using hashes
-------------
-
-If you would like to use *Hash-Checking Mode* available in ``pip`` since
-version 8.0, ``pip-compile`` offers ``--generate-hashes`` flag:
-
-.. code-block:: bash
-
- $ pip-compile --generate-hashes requirements.in
- #
- # This file is autogenerated by pip-compile with Python 3.10
- # by the following command:
- #
- # pip-compile --generate-hashes requirements.in
- #
- asgiref==3.6.0 \
- --hash=sha256:71e68008da809b957b7ee4b43dbccff33d1b23519fb8344e33f049897077afac \
- --hash=sha256:9567dfe7bd8d3c8c892227827c41cce860b368104c3431da67a0c5a65a949506
- # via django
- django==4.1.7 \
- --hash=sha256:44f714b81c5f190d9d2ddad01a532fe502fa01c4cb8faf1d081f4264ed15dcd8 \
- --hash=sha256:f2f431e75adc40039ace496ad3b9f17227022e8b11566f4b363da44c7e44761e
- # via -r requirements.in
- sqlparse==0.4.3 \
- --hash=sha256:0323c0ec29cd52bceabc1b4d9d579e311f3e4961b98d174201d5622a23b85e34 \
- --hash=sha256:69ca804846bb114d2ec380e4360a8a340db83f0ccf3afceeb1404df028f57268
- # via django
-
-Output File
------------
-
-To output the pinned requirements in a filename other than
-``requirements.txt``, use ``--output-file``. This might be useful for compiling
-multiple files, for example with different constraints on django to test a
-library with both versions using `tox <https://tox.readthedocs.io/en/latest/>`__:
-
-.. code-block:: bash
-
- $ pip-compile --upgrade-package 'django<1.0' --output-file requirements-django0x.txt
- $ pip-compile --upgrade-package 'django<2.0' --output-file requirements-django1x.txt
-
-Or to output to standard output, use ``--output-file=-``:
-
-.. code-block:: bash
-
- $ pip-compile --output-file=- > requirements.txt
- $ pip-compile - --output-file=- < requirements.in > requirements.txt
-
-Forwarding options to ``pip``
------------------------------
-
-Any valid ``pip`` flags or arguments may be passed on with ``pip-compile``'s
-``--pip-args`` option, e.g.
-
-.. code-block:: bash
-
- $ pip-compile requirements.in --pip-args "--retries 10 --timeout 30"
-
-Configuration
--------------
-
-You might be wrapping the ``pip-compile`` command in another script. To avoid
-confusing consumers of your custom script you can override the update command
-generated at the top of requirements files by setting the
-``CUSTOM_COMPILE_COMMAND`` environment variable.
-
-.. code-block:: bash
-
- $ CUSTOM_COMPILE_COMMAND="./pipcompilewrapper" pip-compile requirements.in
- #
- # This file is autogenerated by pip-compile with Python 3.10
- # by the following command:
- #
- # ./pipcompilewrapper
- #
- asgiref==3.6.0
- # via django
- django==4.1.7
- # via -r requirements.in
- sqlparse==0.4.3
- # via django
-
-Workflow for layered requirements
----------------------------------
-
-If you have different environments that you need to install different but
-compatible packages for, then you can create layered requirements files and use
-one layer to constrain the other.
-
-For example, if you have a Django project where you want the newest ``2.1``
-release in production and when developing you want to use the Django debug
-toolbar, then you can create two ``*.in`` files, one for each layer:
-
-.. code-block:: ini
-
- # requirements.in
- django<2.2
-
-At the top of the development requirements ``dev-requirements.in`` you use ``-c
-requirements.txt`` to constrain the dev requirements to packages already
-selected for production in ``requirements.txt``.
-
-.. code-block:: ini
-
- # dev-requirements.in
- -c requirements.txt
- django-debug-toolbar<2.2
-
-First, compile ``requirements.txt`` as usual:
-
-.. code-block:: bash
-
- $ pip-compile
- #
- # This file is autogenerated by pip-compile with Python 3.10
- # by the following command:
- #
- # pip-compile
- #
- django==2.1.15
- # via -r requirements.in
- pytz==2023.3
- # via django
-
-
-Now compile the dev requirements and the ``requirements.txt`` file is used as
-a constraint:
-
-.. code-block:: bash
-
- $ pip-compile dev-requirements.in
- #
- # This file is autogenerated by pip-compile with Python 3.10
- # by the following command:
- #
- # pip-compile dev-requirements.in
- #
- django==2.1.15
- # via
- # -c requirements.txt
- # django-debug-toolbar
- django-debug-toolbar==2.1
- # via -r dev-requirements.in
- pytz==2023.3
- # via
- # -c requirements.txt
- # django
- sqlparse==0.4.3
- # via django-debug-toolbar
-
-As you can see above, even though a ``2.2`` release of Django is available, the
-dev requirements only include a ``2.1`` version of Django because they were
-constrained. Now both compiled requirements files can be installed safely in
-the dev environment.
-
-To install requirements in production stage use:
-
-.. code-block:: bash
-
- $ pip-sync
-
-You can install requirements in development stage by:
-
-.. code-block:: bash
-
- $ pip-sync requirements.txt dev-requirements.txt
-
-
-Version control integration
----------------------------
-
-You might use ``pip-compile`` as a hook for the `pre-commit <https://github.com/pre-commit/pre-commit>`_.
-See `pre-commit docs <https://pre-commit.com/>`_ for instructions.
-Sample ``.pre-commit-config.yaml``:
-
-.. code-block:: yaml
-
- repos:
- - repo: https://github.com/jazzband/pip-tools
- rev: 6.13.0
- hooks:
- - id: pip-compile
-
-You might want to customize ``pip-compile`` args by configuring ``args`` and/or ``files``, for example:
-
-.. code-block:: yaml
-
- repos:
- - repo: https://github.com/jazzband/pip-tools
- rev: 6.13.0
- hooks:
- - id: pip-compile
- files: ^requirements/production\.(in|txt)$
- args: [--index-url=https://example.com, requirements/production.in]
-
-If you have multiple requirement files make sure you create a hook for each file.
-
-.. code-block:: yaml
-
- repos:
- - repo: https://github.com/jazzband/pip-tools
- rev: 6.13.0
- hooks:
- - id: pip-compile
- name: pip-compile setup.py
- files: ^(setup\.py|requirements\.txt)$
- - id: pip-compile
- name: pip-compile requirements-dev.in
- args: [requirements-dev.in]
- files: ^requirements-dev\.(in|txt)$
- - id: pip-compile
- name: pip-compile requirements-lint.in
- args: [requirements-lint.in]
- files: ^requirements-lint\.(in|txt)$
- - id: pip-compile
- name: pip-compile requirements.in
- args: [requirements.in]
- files: ^requirements\.(in|txt)$
-
-
-Example usage for ``pip-sync``
-==============================
-
-Now that you have a ``requirements.txt``, you can use ``pip-sync`` to update
-your virtual environment to reflect exactly what's in there. This will
-install/upgrade/uninstall everything necessary to match the
-``requirements.txt`` contents.
-
-Run it with ``pip-sync`` or ``python -m piptools sync``. If you use multiple
-Python versions, you can also run ``py -X.Y -m piptools sync`` on Windows and
-``pythonX.Y -m piptools sync`` on other systems.
-
-``pip-sync`` must be installed into and run from the same virtual
-environment as your project to identify which packages to install
-or upgrade.
-
-**Be careful**: ``pip-sync`` is meant to be used only with a
-``requirements.txt`` generated by ``pip-compile``.
-
-.. code-block:: bash
-
- $ pip-sync
- Uninstalling flake8-2.4.1:
- Successfully uninstalled flake8-2.4.1
- Collecting click==4.1
- Downloading click-4.1-py2.py3-none-any.whl (62kB)
- 100% |................................| 65kB 1.8MB/s
- Found existing installation: click 4.0
- Uninstalling click-4.0:
- Successfully uninstalled click-4.0
- Successfully installed click-4.1
-
-To sync multiple ``*.txt`` dependency lists, just pass them in via command
-line arguments, e.g.
-
-.. code-block:: bash
-
- $ pip-sync dev-requirements.txt requirements.txt
-
-Passing in empty arguments would cause it to default to ``requirements.txt``.
-
-Any valid ``pip install`` flags or arguments may be passed with ``pip-sync``'s
-``--pip-args`` option, e.g.
-
-.. code-block:: bash
-
- $ pip-sync requirements.txt --pip-args "--no-cache-dir --no-deps"
-
-**Note**: ``pip-sync`` will not upgrade or uninstall packaging tools like
-``setuptools``, ``pip``, or ``pip-tools`` itself. Use ``python -m pip install --upgrade``
-to upgrade those packages.
-
-Should I commit ``requirements.in`` and ``requirements.txt`` to source control?
-===============================================================================
-
-Generally, yes. If you want a reproducible environment installation available from your source control,
-then yes, you should commit both ``requirements.in`` and ``requirements.txt`` to source control.
-
-Note that if you are deploying on multiple Python environments (read the section below),
-then you must commit a separate output file for each Python environment.
-We suggest to use the ``{env}-requirements.txt`` format
-(ex: ``win32-py3.7-requirements.txt``, ``macos-py3.10-requirements.txt``, etc.).
-
-
-Cross-environment usage of ``requirements.in``/``requirements.txt`` and ``pip-compile``
-=======================================================================================
-
-The dependencies of a package can change depending on the Python environment in which it
-is installed. Here, we define a Python environment as the combination of Operating
-System, Python version (3.7, 3.8, etc.), and Python implementation (CPython, PyPy,
-etc.). For an exact definition, refer to the possible combinations of `PEP 508
-environment markers`_.
-
-As the resulting ``requirements.txt`` can differ for each environment, users must
-execute ``pip-compile`` **on each Python environment separately** to generate a
-``requirements.txt`` valid for each said environment. The same ``requirements.in`` can
-be used as the source file for all environments, using `PEP 508 environment markers`_ as
-needed, the same way it would be done for regular ``pip`` cross-environment usage.
-
-If the generated ``requirements.txt`` remains exactly the same for all Python
-environments, then it can be used across Python environments safely. **But** users
-should be careful as any package update can introduce environment-dependent
-dependencies, making any newly generated ``requirements.txt`` environment-dependent too.
-As a general rule, it's advised that users should still always execute ``pip-compile``
-on each targeted Python environment to avoid issues.
-
-.. _PEP 508 environment markers: https://www.python.org/dev/peps/pep-0508/#environment-markers
-
-Other useful tools
-==================
-
-- `pipdeptree`_ to print the dependency tree of the installed packages.
-- ``requirements.in``/``requirements.txt`` syntax highlighting:
-
- * `requirements.txt.vim`_ for Vim.
- * `Python extension for VS Code`_ for VS Code.
- * `pip-requirements.el`_ for Emacs.
-
-.. _pipdeptree: https://github.com/naiquevin/pipdeptree
-.. _requirements.txt.vim: https://github.com/raimon49/requirements.txt.vim
-.. _Python extension for VS Code: https://marketplace.visualstudio.com/items?itemName=ms-python.python
-.. _pip-requirements.el: https://github.com/Wilfred/pip-requirements.el
-
-
-Deprecations
-============
-
-This section lists ``pip-tools`` features that are currently deprecated.
-
-- In future versions, the ``--allow-unsafe`` behavior will be enabled by
- default. Use ``--no-allow-unsafe`` to keep the old behavior. It is
- recommended to pass the ``--allow-unsafe`` now to adapt to the upcoming
- change.
-- Legacy resolver is deprecated and will be removed in future versions.
- Use ``--resolver=backtracking`` instead.
-
-A Note on Resolvers
-===================
-
-You can choose from either the legacy or the backtracking resolver.
-The backtracking resolver is recommended, and will become the default
-with the 7.0 release.
-
-Use it now with the ``--resolver=backtracking`` option to ``pip-compile``.
-
-The legacy resolver will occasionally fail to resolve dependencies. The
-backtracking resolver is more robust, but can take longer to run in
-general.
-
-You can continue using the legacy resolver with ``--resolver=legacy``.
-
-Versions and compatibility
-==========================
-
-The table below summarizes the latest ``pip-tools`` versions with the required
-``pip`` and Python versions. Generally, ``pip-tools`` supports the same Python
-versions as the required ``pip`` versions.
-
-+----------------+----------------+----------------+
-| pip-tools | pip | Python |
-+================+================+================+
-| 4.5.* | 8.1.3 - 20.0.2 | 2.7, 3.5 - 3.8 |
-+----------------+----------------+----------------+
-| 5.0.0 - 5.3.0 | 20.0 - 20.1.1 | 2.7, 3.5 - 3.8 |
-+----------------+----------------+----------------+
-| 5.4.0 | 20.1 - 20.3.* | 2.7, 3.5 - 3.8 |
-+----------------+----------------+----------------+
-| 5.5.0 | 20.1 - 20.3.* | 2.7, 3.5 - 3.9 |
-+----------------+----------------+----------------+
-| 6.0.0 - 6.3.1 | 20.3 - 21.2.* | 3.6 - 3.9 |
-+----------------+----------------+----------------+
-| 6.4.0 | 21.2 - 21.3.* | 3.6 - 3.10 |
-+----------------+----------------+----------------+
-| 6.5.0 - 6.10.0 | 21.2 - 22.3.* | 3.7 - 3.11 |
-+----------------+----------------+----------------+
-| 6.11.0+ | 22.2+ | 3.7 - 3.11 |
-+----------------+----------------+----------------+
diff --git a/docs/conf.py b/docs/conf.py
index e2e39ccdb..c8f91cf22 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -52,3 +52,4 @@
# -------------------------------------------------------------------------
default_role = "any"
nitpicky = True
+suppress_warnings = ["myst.xref_missing"]
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 000000000..570ca947b
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,14 @@
+# Welcome to pip-tools' documentation!
+
+```{include} ../README.md
+
+```
+
+```{toctree}
+:hidden:
+:maxdepth: 2
+:caption: Contents
+
+contributing
+changelog
+```
diff --git a/docs/index.rst b/docs/index.rst
deleted file mode 100644
index fb0631f5e..000000000
--- a/docs/index.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-.. pip-tools documentation master file, created by
- sphinx-quickstart on Tue Jun 22 00:43:50 2021.
- You can adapt this file completely to your liking, but it should at least
- contain the root `toctree` directive.
-
-====================================
-Welcome to pip-tools' documentation!
-====================================
-
-.. include:: ../README.rst
-
-.. toctree::
- :hidden:
- :maxdepth: 2
- :caption: Contents:
-
- contributing.md
- changelog.md
-
-
-Indices and tables
-==================
-
-* :ref:`genindex`
-* :ref:`modindex`
-* :ref:`search`
diff --git a/pyproject.toml b/pyproject.toml
index 4a07c5a9f..8c061c9ad 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,7 +8,7 @@ requires-python = ">=3.7"
dynamic = ["version"]
name = "pip-tools"
description = "pip-tools keeps your pinned dependencies fresh."
-readme = "README.rst"
+readme = "README.md"
authors = [{ "name" = "Vincent Driessen", "email" = "me@nvie.com" }]
license = { text = "BSD" }
classifiers = [
|
TencentBlueKing__bk-user-164 | 部门查询接口 ?lookup_field=name,当部门名称中含有 "." 时返回 404
**用文字描述你遇到的问题**
请用简练的文字描述你遇到的问题,问题描述的清晰程度决定了问题被解决的效率。
**重现方法**
1. 创建一个目录,名字包含点,如【广东省.深圳市】
2. 使用api查询, http:://{host:port}/api/v2/departments/广东省.深圳市/?lookup_field=name
查询结果是404
请描述问题重现的方法,如果不方便描述,可以通过截图或者视频辅助。
**预期行为**
预期的正常行为
**版本**
- 提供用户管理的具体版本号
- 是否是企业版问题?
**如果是 SaaS 页面问题,请提供使用的操作系统和浏览器信息**
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]
**额外信息**
任何你觉得有助于问题解决的内容
| [
{
"content": "# -*- coding: utf-8 -*-\n\"\"\"\nTencentBlueKing is pleased to support the open source community by making 蓝鲸智云-用户管理(Bk-User) available.\nCopyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.\nLicensed under the MIT License (the \"License\"); you may not use this file ex... | [
{
"content": "# -*- coding: utf-8 -*-\n\"\"\"\nTencentBlueKing is pleased to support the open source community by making 蓝鲸智云-用户管理(Bk-User) available.\nCopyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.\nLicensed under the MIT License (the \"License\"); you may not use this file ex... | diff --git a/src/api/bkuser_core/departments/urls.py b/src/api/bkuser_core/departments/urls.py
index ce5aa3007..14c81ca61 100644
--- a/src/api/bkuser_core/departments/urls.py
+++ b/src/api/bkuser_core/departments/urls.py
@@ -13,7 +13,7 @@
from . import views
-PVAR_DEPARTMENT_ID = r"(?P<%s>[\w\-]+)" % LOOKUP_FIELD_NAME
+PVAR_DEPARTMENT_ID = r"(?P<%s>[\w\-\.]+)" % LOOKUP_FIELD_NAME
urlpatterns = [
url(
|
pwr-Solaar__Solaar-730 | better identification of Solaar versions
`git describe` produces
0.9.2-339-g39791be
Instead it should produce something based on 1.0.1
`git describe --tags` produces
1.0.1-58-g39791be
which is much better.
I think that all that is required is to upgrade the 1.0.1 tag that already exists.
| [
{
"content": "# -*- python-mode -*-\n# -*- coding: UTF-8 -*-\n\n## Copyright (C) 2012-2013 Daniel Pavel\n##\n## This program is free software; you can redistribute it and/or modify\n## it under the terms of the GNU General Public License as published by\n## the Free Software Foundation; either version 2 of the... | [
{
"content": "# -*- python-mode -*-\n# -*- coding: UTF-8 -*-\n\n## Copyright (C) 2012-2013 Daniel Pavel\n##\n## This program is free software; you can redistribute it and/or modify\n## it under the terms of the GNU General Public License as published by\n## the Free Software Foundation; either version 2 of the... | diff --git a/docs/_config.yml b/docs/_config.yml
index c40bfa750d..fe499a7775 100644
--- a/docs/_config.yml
+++ b/docs/_config.yml
@@ -4,7 +4,7 @@ tagline: Linux Device manager for the Logitech Unifying Receiver.
owner: pwr-Solaar
owner_url: https://github.com/pwr-Solaar
repository: pwr-Solaar/Solaar
-version: 1.0.1
+version: 1.0.2-rc1
show_downloads: false
encoding: utf-8
theme: jekyll-theme-slate
\ No newline at end of file
diff --git a/lib/solaar/__init__.py b/lib/solaar/__init__.py
index 14131450ea..5b6fa80982 100644
--- a/lib/solaar/__init__.py
+++ b/lib/solaar/__init__.py
@@ -19,5 +19,5 @@
from __future__ import absolute_import, division, print_function, unicode_literals
-__version__ = '1.0.1'
+__version__ = '1.0.2-rc1'
NAME = 'Solaar'
|
apache__airflow-18209 | Upgrade `importlib-resources` version
### Description
The version for `importlib-resources` constraint sets it to be [v1.5.0](https://github.com/python/importlib_resources/tree/v1.5.0) which is over a year old. For compatibility sake (for instance with something like Datapane) I would suggest upgrading it.
### Use case/motivation
Upgrade a an old dependency to keep code up to date.
### Related issues
Not that I am aware of, maybe somewhat #12120, or #15991.
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://github.com/apache/airflow/blob/main/CODE_OF_CONDUCT.md)
| [
{
"content": "#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (th... | [
{
"content": "#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (th... | diff --git a/setup.cfg b/setup.cfg
index 4d2ee284f9bcf..10d7b7d713243 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -111,7 +111,7 @@ install_requires =
gunicorn>=20.1.0
httpx
importlib_metadata>=1.7;python_version<"3.9"
- importlib_resources~=1.4
+ importlib_resources~=5.0
# Required by vendored-in connexion
inflection>=0.3.1
iso8601>=0.1.12
diff --git a/setup.py b/setup.py
index e64d0ce7ff0dd..e392e288b4186 100644
--- a/setup.py
+++ b/setup.py
@@ -512,7 +512,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version
'freezegun',
'github3.py',
'gitpython',
- 'importlib-resources~=1.4',
+ 'importlib-resources~=5.0',
'ipdb',
'jira',
'jsondiff',
|
readthedocs__readthedocs.org-5424 | Remove possibel unused constant
At first sight looks like isn't used anymore after https://github.com/rtfd/readthedocs.org/pull/5383
https://github.com/rtfd/readthedocs.org/blob/78c34c904b347110b2cd545b4b5a80ed526590f7/readthedocs/core/models.py#L13-L13
We should still double check and make sure tests are passing after the removal.
| [
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Models for the core app.\"\"\"\nimport logging\n\nfrom annoying.fields import AutoOneToOneField\nfrom django.db import models\nfrom django.urls import reverse\nfrom django.utils.translation import ugettext\nfrom django.utils.translation import ugettext_lazy as _\n\... | [
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Models for the core app.\"\"\"\nimport logging\n\nfrom annoying.fields import AutoOneToOneField\nfrom django.db import models\nfrom django.urls import reverse\nfrom django.utils.translation import ugettext\nfrom django.utils.translation import ugettext_lazy as _\n\... | diff --git a/readthedocs/core/models.py b/readthedocs/core/models.py
index d129f181a5e..3fab8570b45 100644
--- a/readthedocs/core/models.py
+++ b/readthedocs/core/models.py
@@ -10,8 +10,6 @@
from django.utils.translation import ugettext_lazy as _
-STANDARD_EMAIL = 'anonymous@readthedocs.org'
-
log = logging.getLogger(__name__)
|
joke2k__faker-146 | timezone() randomly throws an exception
fake.timezone() sometimes throws an exception, possibly when a country doesn't have any timezones defined:
``` python
>>> from faker import Faker
>>> f = Faker()
>>> f.timezone()
'Africa/Mogadishu'
>>> f.timezone()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/vagrant/.python/lib/python3.3/site-packages/faker/providers/date_time.py", line 378, in timezone
return cls.random_element(cls.countries)['timezones'].pop(0)
```
This is with Python 3.3 using fake-factory 0.4.0 from pypi.
| [
{
"content": "# coding=utf-8\n\nfrom __future__ import unicode_literals\nfrom . import BaseProvider\nimport random\nimport re\nfrom time import time, mktime\nfrom datetime import timedelta\nfrom faker.utils.datetime_safe import date, datetime, real_date, real_datetime\nfrom faker.utils import is_string\n\n\ndef... | [
{
"content": "# coding=utf-8\n\nfrom __future__ import unicode_literals\nfrom . import BaseProvider\nimport random\nimport re\nfrom time import time, mktime\nfrom datetime import timedelta\nfrom faker.utils.datetime_safe import date, datetime, real_date, real_datetime\nfrom faker.utils import is_string\n\n\ndef... | diff --git a/faker/providers/date_time.py b/faker/providers/date_time.py
index 7d122d40cf..b91b33464b 100644
--- a/faker/providers/date_time.py
+++ b/faker/providers/date_time.py
@@ -382,7 +382,4 @@ def century(cls):
@classmethod
def timezone(cls):
- return cls.random_element(cls.countries)['timezones'].pop(0)
-
-
-
+ return random.choice(cls.random_element(cls.countries)['timezones'])
|
mitmproxy__mitmproxy-6493 | uppercase breaks block_list
#### Problem Description
using these values for `block_list`
~~~
/~u AccountsSignInUi/444
/~u accountssigninui/444
~~~
neither one is blocking the expected URL:
~~~
https://accounts.google.com/v3/signin/_/AccountsSignInUi/data/batchexecute
~~~
this works:
~~~
/~u .ccounts.ign.n.i/444
~~~
why is uppercase character breaking the search?
#### System Information
tried with both:
~~~
> mitmproxy --version
Mitmproxy: 8.0.0 binary
Python: 3.10.2
OpenSSL: OpenSSL 1.1.1n 15 Mar 2022
Platform: Windows-10-10.0.18363-SP0
> mitmproxy --version
Mitmproxy: 10.0.0 binary
Python: 3.11.4
OpenSSL: OpenSSL 3.1.2 1 Aug 2023
Platform: Windows-10-10.0.18363-SP0
~~~
| [
{
"content": "\"\"\"\n The following operators are understood:\n\n ~q Request\n ~s Response\n\n Headers:\n\n Patterns are matched against \"name: value\" strings. Field names are\n all-lowercase.\n\n ~a Asset content-type in response. Asset con... | [
{
"content": "\"\"\"\n The following operators are understood:\n\n ~q Request\n ~s Response\n\n Headers:\n\n Patterns are matched against \"name: value\" strings. Field names are\n all-lowercase.\n\n ~a Asset content-type in response. Asset con... | diff --git a/CHANGELOG.md b/CHANGELOG.md
index bdac343e75..a80ac80cac 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -18,6 +18,8 @@
([#6543](https://github.com/mitmproxy/mitmproxy/pull/6543), @mhils)
* DNS resolution is now exempted from `--ignore-hosts` in WireGuard Mode.
([#6513](https://github.com/mitmproxy/mitmproxy/pull/6513), @dsphper)
+* Fix case sensitivity of URL added to blocklist
+ ([#6493](https://github.com/mitmproxy/mitmproxy/pull/6493), @emanuele-em)
* Fix a bug where logging was stopped prematurely during shutdown.
([#6541](https://github.com/mitmproxy/mitmproxy/pull/6541), @mhils)
* For plaintext traffic, `--ignore-hosts` now also takes HTTP/1 host headers into account.
diff --git a/mitmproxy/flowfilter.py b/mitmproxy/flowfilter.py
index 52db22be03..840583f3d1 100644
--- a/mitmproxy/flowfilter.py
+++ b/mitmproxy/flowfilter.py
@@ -402,6 +402,7 @@ class FUrl(_Rex):
code = "u"
help = "URL"
is_binary = False
+ flags = re.IGNORECASE
# FUrl is special, because it can be "naked".
diff --git a/test/mitmproxy/addons/test_blocklist.py b/test/mitmproxy/addons/test_blocklist.py
index 9187443b28..b7c7e536d3 100644
--- a/test/mitmproxy/addons/test_blocklist.py
+++ b/test/mitmproxy/addons/test_blocklist.py
@@ -22,20 +22,21 @@ def test_parse_spec_err(filter, err):
class TestBlockList:
@pytest.mark.parametrize(
- "filter,status_code",
+ "filter,request_url,status_code",
[
- (":~u example.org:404", 404),
- (":~u example.com:404", None),
- ("/!jpg/418", None),
- ("/!png/418", 418),
+ (":~u example.org:404", b"https://example.org/images/test.jpg", 404),
+ (":~u example.com:404", b"https://example.org/images/test.jpg", None),
+ (":~u test:404", b"https://example.org/images/TEST.jpg", 404),
+ ("/!jpg/418", b"https://example.org/images/test.jpg", None),
+ ("/!png/418", b"https://example.org/images/test.jpg", 418),
],
)
- def test_block(self, filter, status_code):
+ def test_block(self, filter, request_url, status_code):
bl = blocklist.BlockList()
with taddons.context(bl) as tctx:
tctx.configure(bl, block_list=[filter])
f = tflow.tflow()
- f.request.url = b"https://example.org/images/test.jpg"
+ f.request.url = request_url
bl.request(f)
if status_code is not None:
assert f.response.status_code == status_code
|
google__flax-3540 | Error when using nn.scan with negative output_axes
### System information
- OS Platform and Distribution (e.g., Linux Ubuntu 16.04): N/A
- Flax, jax, jaxlib versions (obtain with `pip show flax jax jaxlib`: ```flax==0.6.11, jax==0.4.9, jaxlib==0.4.9```
- Python version: ```3.8```
- GPU/TPU model and memory: N/A
- CUDA version (if applicable): N/A
### Problem you have encountered:
When using ```flax.linen.scan``` with a negative ```output_axes```, there is an unexpected ```AssertionError```. If I have understood the source code correctly, it is due to a typo [here](https://github.com/google/flax/blob/main/flax/core/axes_scan.py#L103) (namely, a minus sign instead of a plus sign).
### What you expected to happen:
Apply scan as usual, stacking the outputs along the specified axis.
### Logs, error messages, etc:
```
(projectabcde) lucaslingle@Lucass-MacBook-Pro projectabcde % python3 scripts/scan_issue.py
Traceback (most recent call last):
File "scripts/scan_issue.py", line 39, in <module>
main()
File "scripts/scan_issue.py", line 32, in main
params = cls().init(
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/jax/_src/traceback_util.py", line 166, in reraise_with_filtered_traceback
return fun(*args, **kwargs)
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/linen/module.py", line 1689, in init
_, v_out = self.init_with_output(
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/jax/_src/traceback_util.py", line 166, in reraise_with_filtered_traceback
return fun(*args, **kwargs)
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/linen/module.py", line 1594, in init_with_output
return init_with_output(
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/core/scope.py", line 968, in wrapper
return apply(fn, mutable=mutable, flags=init_flags)({}, *args, rngs=rngs,
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/core/scope.py", line 936, in wrapper
y = fn(root, *args, **kwargs)
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/linen/module.py", line 2170, in scope_fn
return fn(module.clone(parent=scope, _deep_clone=True), *args, **kwargs)
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/linen/module.py", line 432, in wrapped_module_method
return self._call_wrapped_method(fun, args, kwargs)
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/linen/module.py", line 868, in _call_wrapped_method
y = fun(self, *args, **kwargs)
File "scripts/scan_issue.py", line 18, in __call__
_, outputs = nn.scan(
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/linen/transforms.py", line 323, in wrapped_fn
ret = trafo_fn(module_scopes, *args, **kwargs)
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/core/lift.py", line 219, in wrapper
y, out_variable_groups_xs_t = fn(
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/core/lift.py", line 806, in inner
broadcast_vars, (carry_vars, c), (ys, scan_vars) = scanned(
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/core/axes_scan.py", line 151, in scan_fn
ys = jax.tree_util.tree_map(transpose_from_front, out_axes, ys)
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/jax/_src/tree_util.py", line 210, in tree_map
return treedef.unflatten(f(*xs) for xs in zip(*all_leaves))
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/jax/_src/tree_util.py", line 210, in <genexpr>
return treedef.unflatten(f(*xs) for xs in zip(*all_leaves))
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/core/axes_scan.py", line 106, in transpose_from_front
return jax.tree_util.tree_map(trans, xs)
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/jax/_src/tree_util.py", line 210, in tree_map
return treedef.unflatten(f(*xs) for xs in zip(*all_leaves))
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/jax/_src/tree_util.py", line 210, in <genexpr>
return treedef.unflatten(f(*xs) for xs in zip(*all_leaves))
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/core/axes_scan.py", line 103, in trans
assert pax < x.ndim
jax._src.traceback_util.UnfilteredStackTrace: AssertionError
The stack trace below excludes JAX-internal frames.
The preceding is the original exception that occurred, unmodified.
--------------------
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "scripts/scan_issue.py", line 39, in <module>
main()
File "scripts/scan_issue.py", line 32, in main
params = cls().init(
File "scripts/scan_issue.py", line 18, in __call__
_, outputs = nn.scan(
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/core/axes_scan.py", line 151, in scan_fn
ys = jax.tree_util.tree_map(transpose_from_front, out_axes, ys)
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/core/axes_scan.py", line 106, in transpose_from_front
return jax.tree_util.tree_map(trans, xs)
File "/Users/lucaslingle/opt/miniconda3/envs/projectabcde/lib/python3.8/site-packages/flax/core/axes_scan.py", line 103, in trans
assert pax < x.ndim
AssertionError
```
### Steps to reproduce:
```
# issue appears to be at https://github.com/google/flax/blob/main/flax/core/axes_scan.py#L101
import flax.linen as nn
import jax.random
class Foo(nn.Module):
unused_config: int
@nn.compact
def __call__(self, state, input_dict):
return state, nn.Dense(100)(input_dict["x"])
class Bar(nn.Module):
@nn.compact
def __call__(self, x):
_, outputs = nn.scan(
Foo,
variable_broadcast="params",
split_rngs=dict(
params=False,
),
in_axes=0,
out_axes=-1,
)(unused_config=123)(dict(unused_state_item=None), dict(x=x))
return outputs
def main():
cls = Bar
params = cls().init(
{"params": jax.random.PRNGKey(0)},
jax.random.normal(jax.random.PRNGKey(1), shape=[8, 128, 16])
)["params"]
if __name__ == "__main__":
main()
```
Thank you for your attention to this matter!
| [
{
"content": "# Copyright 2023 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by ap... | [
{
"content": "# Copyright 2023 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by ap... | diff --git a/flax/core/axes_scan.py b/flax/core/axes_scan.py
index 8c51ca3df..2ffd347db 100644
--- a/flax/core/axes_scan.py
+++ b/flax/core/axes_scan.py
@@ -100,7 +100,7 @@ def transpose_from_front(ax, xs):
def trans(x):
if ax < 0:
- pax = x.ndim - ax
+ pax = x.ndim + ax
else:
pax = ax
assert pax < x.ndim
|
Kinto__kinto-1302 | Cannot import name `Utc`
While trying to debug #1299 I encountered the following error:
```
$ make serve
...
~/.virtualenvs/test/bin/kinto migrate --ini config/kinto.ini
Traceback (most recent call last):
File "~/.virtualenvs/test/bin/kinto", line 11, in <module>
load_entry_point('kinto', 'console_scripts', 'kinto')()
File "~/.virtualenvs/test/lib/python3.5/site-packages/pkg_resources/__init__.py", line 560, in load_entry_point
return get_distribution(dist).load_entry_point(group, name)
File "~/.virtualenvs/test/lib/python3.5/site-packages/pkg_resources/__init__.py", line 2648, in load_entry_point
return ep.load()
File "~/.virtualenvs/test/lib/python3.5/site-packages/pkg_resources/__init__.py", line 2302, in load
return self.resolve()
File "~/.virtualenvs/test/lib/python3.5/site-packages/pkg_resources/__init__.py", line 2308, in resolve
module = __import__(self.module_name, fromlist=['__name__'], level=0)
File "~/mozilla/kinto/kinto/__init__.py", line 4, in <module>
import kinto.core
File "~/mozilla/kinto/kinto/core/__init__.py", line 10, in <module>
from kinto.core import errors
File "~/mozilla/kinto/kinto/core/errors.py", line 1, in <module>
import colander
File "~/.virtualenvs/test/lib/python3.5/site-packages/colander/__init__.py", line 22, in <module>
from . import iso8601
File "~/.virtualenvs/test/lib/python3.5/site-packages/colander/iso8601.py", line 3, in <module>
from iso8601.iso8601 import (parse_date, ParseError, Utc, FixedOffset, UTC, ZERO, ISO8601_REGEX)
ImportError: cannot import name 'Utc'
Makefile:87 : la recette pour la cible « migrate » a échouée
make: *** [migrate] Erreur 1
```
Cannot import name `Utc`
While trying to debug #1299 I encountered the following error:
```
$ make serve
...
~/.virtualenvs/test/bin/kinto migrate --ini config/kinto.ini
Traceback (most recent call last):
File "~/.virtualenvs/test/bin/kinto", line 11, in <module>
load_entry_point('kinto', 'console_scripts', 'kinto')()
File "~/.virtualenvs/test/lib/python3.5/site-packages/pkg_resources/__init__.py", line 560, in load_entry_point
return get_distribution(dist).load_entry_point(group, name)
File "~/.virtualenvs/test/lib/python3.5/site-packages/pkg_resources/__init__.py", line 2648, in load_entry_point
return ep.load()
File "~/.virtualenvs/test/lib/python3.5/site-packages/pkg_resources/__init__.py", line 2302, in load
return self.resolve()
File "~/.virtualenvs/test/lib/python3.5/site-packages/pkg_resources/__init__.py", line 2308, in resolve
module = __import__(self.module_name, fromlist=['__name__'], level=0)
File "~/mozilla/kinto/kinto/__init__.py", line 4, in <module>
import kinto.core
File "~/mozilla/kinto/kinto/core/__init__.py", line 10, in <module>
from kinto.core import errors
File "~/mozilla/kinto/kinto/core/errors.py", line 1, in <module>
import colander
File "~/.virtualenvs/test/lib/python3.5/site-packages/colander/__init__.py", line 22, in <module>
from . import iso8601
File "~/.virtualenvs/test/lib/python3.5/site-packages/colander/iso8601.py", line 3, in <module>
from iso8601.iso8601 import (parse_date, ParseError, Utc, FixedOffset, UTC, ZERO, ISO8601_REGEX)
ImportError: cannot import name 'Utc'
Makefile:87 : la recette pour la cible « migrate » a échouée
make: *** [migrate] Erreur 1
```
| [
{
"content": "import codecs\nimport os\nfrom setuptools import setup, find_packages\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n\ndef read_file(filename):\n \"\"\"Open a related file and return its content.\"\"\"\n with codecs.open(os.path.join(here, filename), encoding='utf-8') as f:\n ... | [
{
"content": "import codecs\nimport os\nfrom setuptools import setup, find_packages\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n\ndef read_file(filename):\n \"\"\"Open a related file and return its content.\"\"\"\n with codecs.open(os.path.join(here, filename), encoding='utf-8') as f:\n ... | diff --git a/setup.py b/setup.py
index 36edbd494..1ffb4863d 100644
--- a/setup.py
+++ b/setup.py
@@ -18,6 +18,7 @@ def read_file(filename):
REQUIREMENTS = [
'bcrypt',
+ 'iso8601==0.1.11', # Refs #1301
'colander >= 1.3.2',
'cornice >= 2.4',
'cornice_swagger >= 0.5.1',
|
pretalx__pretalx-263 | Mail config doesn't work
The fallback/default config is not used, even when an event's mail config is untouched.
| [
{
"content": "import configparser\nimport os\nimport sys\nfrom contextlib import suppress\nfrom urllib.parse import urlparse\n\nfrom django.contrib.messages import constants as messages # NOQA\nfrom django.utils.crypto import get_random_string\nfrom django.utils.translation import ugettext_lazy as _ # NOQA\n\... | [
{
"content": "import configparser\nimport os\nimport sys\nfrom contextlib import suppress\nfrom urllib.parse import urlparse\n\nfrom django.contrib.messages import constants as messages # NOQA\nfrom django.utils.crypto import get_random_string\nfrom django.utils.translation import ugettext_lazy as _ # NOQA\n\... | diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index e61b4867ee..207e5d71df 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -9,6 +9,12 @@ vx.x.x
*Released on 2017-xx-xx*
+Breaking Changes
+~~~~~~~~~~~~~~~~
+
+- The default value for email SSL usage is now ``False``, permitting the default
+ configuration of ``localhost:25`` to work on more machines out of the box.
+
Features
~~~~~~~~
diff --git a/doc/install/configure.rst b/doc/install/configure.rst
index a84df48063..0f1ab879cf 100644
--- a/doc/install/configure.rst
+++ b/doc/install/configure.rst
@@ -182,7 +182,7 @@ The mail section
- Should SSL be used when sending mail? Only one of TLS and SSL may be used.
- **Environment variable:** ``PRETALX_MAIL_SSL``
-- **Default:** ``True``
+- **Default:** ``False``
The celery section
------------------
diff --git a/src/pretalx/settings.py b/src/pretalx/settings.py
index df99d2ada9..d3c204eef9 100644
--- a/src/pretalx/settings.py
+++ b/src/pretalx/settings.py
@@ -45,7 +45,7 @@ def reduce_dict(data):
'user': '',
'password': '',
'tls': 'False',
- 'ssl': 'True',
+ 'ssl': 'False',
},
'cache': {
},
|
microsoft__ptvsd-843 | Using sys.exit() with no arguments causes TypeError inside ptvsd
## Environment data
- PTVSD version: 4.1.3
- OS and version: Windows 10
- Python version (& distribution if applicable, e.g. Anaconda): 3.6
- Using VS Code or Visual Studio: VS
## Actual behavior
```
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "c:\program files (x86)\microsoft visual studio\2017\community\common7\ide\extensions\microsoft\python\core\ptvsd_launcher.py", line 119, in <module>
vspd.debug(filename, port_num, debug_id, debug_options, run_as)
File "c:\program files (x86)\microsoft visual studio\2017\community\common7\ide\extensions\microsoft\python\core\Packages\ptvsd\debugger.py", line 37, in debug
run(address, filename, *args, **kwargs)
File "c:\program files (x86)\microsoft visual studio\2017\community\common7\ide\extensions\microsoft\python\core\Packages\ptvsd\_local.py", line 48, in run_file
run(argv, addr, **kwargs)
File "c:\program files (x86)\microsoft visual studio\2017\community\common7\ide\extensions\microsoft\python\core\Packages\ptvsd\_local.py", line 101, in _run
daemon.exitcode = int(ex.code)
TypeError
:
int() argument must be a string, a bytes-like object or a number, not 'NoneType'
```
## Expected behavior
No error
## Steps to reproduce:
1. Debug this code:
```
sys.exit()
```
https://docs.python.org/3/library/sys.html#sys.exit
| [
{
"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nimport sys\nimport time\n\nimport pydevd\nfrom _pydevd_bundle.pydevd_comm import get_global_debugger\n\nfrom ptvsd.pydevd_hooks import insta... | [
{
"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nimport sys\nimport time\n\nimport pydevd\nfrom _pydevd_bundle.pydevd_comm import get_global_debugger\n\nfrom ptvsd.pydevd_hooks import insta... | diff --git a/ptvsd/_local.py b/ptvsd/_local.py
index 7872cb1c8..09f671eae 100644
--- a/ptvsd/_local.py
+++ b/ptvsd/_local.py
@@ -139,5 +139,5 @@ def _run(argv, addr, _pydevd=pydevd, _install=install, **kwargs):
try:
_pydevd.main()
except SystemExit as ex:
- daemon.exitcode = int(ex.code)
+ daemon.exitcode = 0 if ex.code is None else int(ex.code)
raise
|
aio-libs__aiohttp-3107 | StreamResponse instances are all equal
## Long story short
Since #2494 , `StreamResponse` inherits `collections.MutableMapping.__eq__`, which makes them basically all equal.
The implementation in `Mapping` looks like `return dict(self.items()) == dict(other.items())`.
This is especially the case for `WebSocketResponse` : when following https://aiohttp.readthedocs.io/en/stable/faq.html#how-do-i-programmatically-close-a-websocket-server-side, if you use a `list` instead of a `set`, it's impossible to `remove` the correct websocket.
## Expected behaviour
```python
>>> from aiohttp.web_ws import WebSocketResponse
>>> r1 = WebSocketResponse()
>>> r2 = WebSocketResponse()
>>> r1 == r2
False
>>> id(r1) == id(r2)
False
>>> r1 is r2
False
>>> hash(r1) == hash(r2)
False
```
As a rule, `a == b` implies `hash(a) == hash(b)`. But it's now broken.
## Actual behaviour
Since v3.0:
```python
>>> r1 == r2
True
>>> id(r1) == id(r2)
False
>>> r1 is r2
False
>>> hash(r1) == hash(r2)
False
```
## Steps to reproduce
Described above
## Your environment
* `aiohttp >= 3.0`
| [
{
"content": "import collections\nimport datetime\nimport enum\nimport json\nimport math\nimport time\nimport warnings\nimport zlib\nfrom email.utils import parsedate\nfrom http.cookies import SimpleCookie\n\nfrom multidict import CIMultiDict, CIMultiDictProxy\n\nfrom . import hdrs, payload\nfrom .helpers impor... | [
{
"content": "import collections\nimport datetime\nimport enum\nimport json\nimport math\nimport time\nimport warnings\nimport zlib\nfrom email.utils import parsedate\nfrom http.cookies import SimpleCookie\n\nfrom multidict import CIMultiDict, CIMultiDictProxy\n\nfrom . import hdrs, payload\nfrom .helpers impor... | diff --git a/CHANGES/3100.bugfix b/CHANGES/3100.bugfix
new file mode 100644
index 00000000000..9d9d3301f69
--- /dev/null
+++ b/CHANGES/3100.bugfix
@@ -0,0 +1 @@
+Fix `StreamResponse` equality, now that they are `MutableMapping` objects.
diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py
index f3c2a5311a8..89835c63342 100644
--- a/aiohttp/web_response.py
+++ b/aiohttp/web_response.py
@@ -432,6 +432,9 @@ def __iter__(self):
def __hash__(self):
return hash(id(self))
+ def __eq__(self, other):
+ return self is other
+
class Response(StreamResponse):
diff --git a/tests/test_web_response.py b/tests/test_web_response.py
index 12a7f25deb2..da414e8c974 100644
--- a/tests/test_web_response.py
+++ b/tests/test_web_response.py
@@ -82,6 +82,14 @@ def test_stream_response_hashable():
hash(StreamResponse())
+def test_stream_response_eq():
+ resp1 = StreamResponse()
+ resp2 = StreamResponse()
+
+ assert resp1 == resp1
+ assert not resp1 == resp2
+
+
def test_stream_response_is_mutable_mapping():
resp = StreamResponse()
assert isinstance(resp, collections.MutableMapping)
|
networkx__networkx-2535 | missing commits
@hagberg , @dschult I just noticed that there is no ``doc/release/api_1.11.rst``, but there is one here:
https://github.com/networkx/networkx/tree/v1.11
in ``doc/source/reference/api_1.11.rst``. It appears this file was never committed on the master branch.
The v1.11 branch is " 59 commits ahead, 1066 commits behind master. " So it looks like there may be a number of missing commits on master. For example, this is also missing:
https://github.com/networkx/networkx/commit/5665c71f3a9aec0325078de2de43537aee03386d
As this shows:
```
$ git lg networkx/drawing/tests/test_agraph.py
* d8ada85 - Make graph attributes work both to/from with agraph (#2507) (11 days ago) [Dan Schult]
* 7bfb768 - Improve drawing test scripts (typos, newlines, methods) (1 year, 5 months ago) [Michael-E-Rose]
* f5031dd - Adjust imports in drawing layouts with graphviz (1 year, 6 months ago) [Dan Schult]
* 9922ec7 - doc, formatting, and whitespace cleanup (5 years ago) [Aric Hagberg]
* 47565b1 - Handle name in translation between pygraphviz (AGraph) and networkx. Fixes #734 (5 years ago) [Aric Hagberg]
* 3665bc1 - Update tests (6 years ago) [Aric Hagberg]
* d41d15f - More imports cleanup and exceptions fixed. (6 years ago) [Loïc Séguin-C.]
* baceff1 - Added tests for multigraph conversion to/from agraph. Changed from_agraph() so that the tests pass. (8 years ago) [dschult]
* ca6df32 - Convert drawing tests to functional tests and use SkipTest if optional packages are not available. (8 years ago) [aric]
```
I suspect that this was unintentional and that I should go through the missing commits and either cherry-pick the appropriate ones or make a new commit when cherry-picking doesn't work. I just wanted to check whether I am correct before I go through the effort. I will make a PR so you can review the commits I grab before merging to master.
| [
{
"content": "\"\"\"Functions to convert NetworkX graphs to and from numpy/scipy matrices.\n\nThe preferred way of converting data to a NetworkX graph is through the\ngraph constuctor. The constructor calls the to_networkx_graph() function\nwhich attempts to guess the input type and convert it automatically.\n... | [
{
"content": "\"\"\"Functions to convert NetworkX graphs to and from numpy/scipy matrices.\n\nThe preferred way of converting data to a NetworkX graph is through the\ngraph constuctor. The constructor calls the to_networkx_graph() function\nwhich attempts to guess the input type and convert it automatically.\n... | diff --git a/doc/news.rst b/doc/news.rst
index 3f9df003576..a02f60858ca 100644
--- a/doc/news.rst
+++ b/doc/news.rst
@@ -8,22 +8,37 @@ NetworkX 2.0
------------
Release date: TBD
-
See :doc:`release/migration_guide_from_1.x_to_2.0`.
API changes
~~~~~~~~~~~
See :doc:`release/release_2.0`.
+NetworkX 1.11
+-------------
+Release date: 30 January 2016
+
+Support for Python 3.5 added, drop support for Python 3.2.
+
+Highlights
+~~~~~~~~~~
+
+Pydot features now use pydotplus.
+Fixes installation on some machines and test with appveyor.
+Restores default center and scale of layout routines.
+Fixes various docs including no symbolic links in examples.
+Docs can now build using autosummary on readthedocs.org.
NetworkX 1.10
--------------
+
Release date: 2 August 2015
Support for Python 2.6 is dropped in this release.
Highlights
~~~~~~~~~~
+
- Connected components now return generators
- new functions including
@@ -61,8 +76,6 @@ Release date: 13 September 2014
Bugfix release for minor installation and documentation issues.
-https://github.com/networkx/networkx/milestones/networkx-1.9.1
-
NetworkX 1.9
------------
Release date: 21 June 2014
diff --git a/doc/release/api_1.11.rst b/doc/release/api_1.11.rst
new file mode 100644
index 00000000000..6a0520a1a1d
--- /dev/null
+++ b/doc/release/api_1.11.rst
@@ -0,0 +1,40 @@
+**********************************
+Version 1.11 notes and API changes
+**********************************
+
+This page includes more detailed release information and API changes from
+NetworkX 1.10 to NetworkX 1.11.
+
+Please send comments and questions to the networkx-discuss mailing list:
+<http://groups.google.com/group/networkx-discuss>.
+
+API changes
+-----------
+* [`#1930 <https://github.com/networkx/networkx/pull/1930>`_]
+ No longer import nx_agraph and nx_pydot into the top-level namespace.
+ They can be accessed within networkx as e.g. ``nx.nx_agraph.write_dot``
+ or imported as ``from networkx.drawing.nx_agraph import write_dot``.
+
+* [`#1750 <https://github.com/networkx/networkx/pull/1750>`_]
+ Arguments center and scale are now available for all layout functions.
+ The defaul values revert to the v1.9 values (center is the origin
+ for circular layouts and domain is [0, scale) for others.
+
+* [`#1924 <https://github.com/networkx/networkx/pull/1924>`_]
+ Replace pydot with pydotplus for drawing with the pydot interface.
+
+* [`#1888 <https://github.com/networkx/networkx/pull/1888>`_]
+ Replace support for Python3.2 with support for Python 3.5.
+
+Miscellaneous changes
+---------------------
+
+* [`#1763 <https://github.com/networkx/networkx/pull/1763>`_]
+ Set up appveyor to automatically test installation on Windows machines.
+ Remove symbolic links in examples to help such istallation.
+
+Change many doc_string typos to allow sphinx
+to build the docs without errors or warnings.
+
+Enable the docs to be automatically built on
+readthedocs.org by changing requirements.txt
diff --git a/doc/release/index.rst b/doc/release/index.rst
index 6320b0d13bb..1b17cd0f6a6 100644
--- a/doc/release/index.rst
+++ b/doc/release/index.rst
@@ -6,6 +6,7 @@ API changes
:maxdepth: 2
release_2.0
+ api_1.11
api_1.10
api_1.9
api_1.8
diff --git a/doc/tutorial.rst b/doc/tutorial.rst
index a59d53aa2e1..e80689e1684 100644
--- a/doc/tutorial.rst
+++ b/doc/tutorial.rst
@@ -456,8 +456,9 @@ PyGraphviz or pydot, are available on your system, you can also use
``nx_agraph.graphviz_layout(G)`` or ``nx_pydot.graphviz_layout(G)`` to get the
node positions, or write the graph in dot format for further processing.
+>>> from networkx.drawing.nx_pydot import write_dot
>>> pos = nx.nx_agraph.graphviz_layout(G)
>>> nx.draw(G, pos=pos)
->>> nx.write_dot(G,'file.dot')
+>>> nx.write_dot(G, 'file.dot')
See :doc:`/reference/drawing` for additional details.
diff --git a/networkx/convert_matrix.py b/networkx/convert_matrix.py
index 2b1a1882bb9..8938e1447f9 100644
--- a/networkx/convert_matrix.py
+++ b/networkx/convert_matrix.py
@@ -1126,3 +1126,7 @@ def setup_module(module):
import scipy
except:
raise SkipTest("SciPy not available")
+ try:
+ import pandas
+ except:
+ raise SkipTest("Pandas not available")
diff --git a/networkx/drawing/tests/test_agraph.py b/networkx/drawing/tests/test_agraph.py
index f24cf9622ea..780a5e1b1e4 100644
--- a/networkx/drawing/tests/test_agraph.py
+++ b/networkx/drawing/tests/test_agraph.py
@@ -3,6 +3,8 @@
import tempfile
from nose import SkipTest
from nose.tools import assert_true, assert_equal
+from networkx.testing import assert_edges_equal, assert_nodes_equal
+
import networkx as nx
@@ -23,8 +25,8 @@ def build_graph(self, G):
return G
def assert_equal(self, G1, G2):
- assert_equal(sorted(G1.nodes()), sorted(G2.nodes()))
- assert_equal(sorted(G1.edges()), sorted(G2.edges()))
+ assert_nodes_equal(G1.nodes(), G2.nodes())
+ assert_edges_equal(G1.edges(), G2.edges())
assert_equal(G1.graph['metal'], G2.graph['metal'])
def agraph_checks(self, G):
|
Pylons__pyramid-3272 | Bump Sphinx to >=1.7.2
Would anyone be opposed to bumping Sphinx to >=1.7.2, != 1.7.3 in `setup.py`? I really want our PDFs to have `emphasize-lines` support, at long last, and bring in support for Unicode characters in PDFs via xelatex.
Refs:
* #667
* #2572
* https://github.com/rtfd/readthedocs.org/issues/4015
| [
{
"content": "##############################################################################\n#\n# Copyright (c) 2008-2013 Agendaless Consulting and Contributors.\n# All Rights Reserved.\n#\n# This software is subject to the provisions of the BSD-like license at\n# http://www.repoze.org/LICENSE.txt. A copy of ... | [
{
"content": "##############################################################################\n#\n# Copyright (c) 2008-2013 Agendaless Consulting and Contributors.\n# All Rights Reserved.\n#\n# This software is subject to the provisions of the BSD-like license at\n# http://www.repoze.org/LICENSE.txt. A copy of ... | diff --git a/setup.py b/setup.py
index c06c5142a1..6a3c8d26b5 100644
--- a/setup.py
+++ b/setup.py
@@ -46,7 +46,7 @@
docs_extras = [
- 'Sphinx >= 1.3.5, != 1.7.3',
+ 'Sphinx >= 1.7.4',
'docutils',
'repoze.sphinx.autointerface',
'pylons_sphinx_latesturl',
|
InternLM__lmdeploy-205 | Serving gradio报错

报错: no module named lmdeploy.serve.gradio
环境里已安装了lmdeploy 0.0.2 python包。
| [
{
"content": "# Copyright (c) OpenMMLab. All rights reserved.\nfrom typing import Tuple\n\n__version__ = '0.0.2'\nshort_version = __version__\n\n\ndef parse_version_info(version_str: str) -> Tuple:\n \"\"\"Parse version from a string.\n\n Args:\n version_str (str): A string represents a version inf... | [
{
"content": "# Copyright (c) OpenMMLab. All rights reserved.\nfrom typing import Tuple\n\n__version__ = '0.0.3'\nshort_version = __version__\n\n\ndef parse_version_info(version_str: str) -> Tuple:\n \"\"\"Parse version from a string.\n\n Args:\n version_str (str): A string represents a version inf... | diff --git a/lmdeploy/version.py b/lmdeploy/version.py
index 8960a13972..81a7f9de1a 100644
--- a/lmdeploy/version.py
+++ b/lmdeploy/version.py
@@ -1,7 +1,7 @@
# Copyright (c) OpenMMLab. All rights reserved.
from typing import Tuple
-__version__ = '0.0.2'
+__version__ = '0.0.3'
short_version = __version__
|
mozmeao__basket-836 | Expand set of fields for PII scrubbing
Based on what I'm seeing, we should add `primary_email` to the list of fields we scrub before sending to Sentry.
| [
{
"content": "import os\nimport platform\nimport socket\nimport struct\nimport sys\nfrom datetime import timedelta\nfrom pathlib import Path\n\nimport dj_database_url\nimport django_cache_url\nimport sentry_sdk\nfrom decouple import Csv, UndefinedValueError, config\nfrom sentry_processor import DesensitizationP... | [
{
"content": "import os\nimport platform\nimport socket\nimport struct\nimport sys\nfrom datetime import timedelta\nfrom pathlib import Path\n\nimport dj_database_url\nimport django_cache_url\nimport sentry_sdk\nfrom decouple import Csv, UndefinedValueError, config\nfrom sentry_processor import DesensitizationP... | diff --git a/basket/base/tests/test__utils.py b/basket/base/tests/test__utils.py
index 0989b987d..d6ffe5c71 100644
--- a/basket/base/tests/test__utils.py
+++ b/basket/base/tests/test__utils.py
@@ -43,6 +43,7 @@ def test_pre_sentry_sanitisation__before_send_setup():
"last_name",
"mobile_number",
"payee_id",
+ "primary_email",
"remote_addr",
"remoteaddresschain",
"token",
diff --git a/basket/settings.py b/basket/settings.py
index d37245dfa..a40cb9e92 100644
--- a/basket/settings.py
+++ b/basket/settings.py
@@ -356,6 +356,7 @@ def get_default_gateway_linux():
"last_name",
"mobile_number",
"payee_id",
+ "primary_email",
"remote_addr",
"remoteaddresschain",
"token",
|
web2py__web2py-1498 | unittest response.render() inside scheduler
to avoid #1485 to happen again
| [
{
"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\n| This file is part of the web2py Web Framework\n| Developed by Massimo Di Pierro <mdipierro@cs.depaul.edu>,\n| limodou <limodou@gmail.com> and srackham <srackham@gmail.com>.\n| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)\n\nWeb... | [
{
"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\n| This file is part of the web2py Web Framework\n| Developed by Massimo Di Pierro <mdipierro@cs.depaul.edu>,\n| limodou <limodou@gmail.com> and srackham <srackham@gmail.com>.\n| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)\n\nWeb... | diff --git a/gluon/shell.py b/gluon/shell.py
index 9d93f1a6d..b6c8f27ba 100644
--- a/gluon/shell.py
+++ b/gluon/shell.py
@@ -254,7 +254,7 @@ def run(
die(errmsg)
if f:
- exec('print %s()' % f, _env)
+ exec('print( %s())' % f, _env)
return
_env.update(exec_pythonrc())
diff --git a/gluon/tests/test_scheduler.py b/gluon/tests/test_scheduler.py
index b473fe7b5..ee1270fba 100644
--- a/gluon/tests/test_scheduler.py
+++ b/gluon/tests/test_scheduler.py
@@ -601,7 +601,8 @@ def inner_teardown(self):
fdest = os.path.join(current.request.folder, 'models', 'scheduler.py')
os.unlink(fdest)
additional_files = [
- os.path.join(current.request.folder, 'private', 'demo8.pholder')
+ os.path.join(current.request.folder, 'private', 'demo8.pholder'),
+ os.path.join(current.request.folder, 'views', 'issue_1485_2.html'),
]
for f in additional_files:
try:
@@ -609,6 +610,12 @@ def inner_teardown(self):
except:
pass
+ def writeview(self, content, dest=None):
+ from gluon import current
+ fdest = os.path.join(current.request.folder, 'views', dest)
+ with open(fdest, 'w') as q:
+ q.write(content)
+
def writefunction(self, content, initlines=None):
from gluon import current
fdest = os.path.join(current.request.folder, 'models', 'scheduler.py')
@@ -620,6 +627,9 @@ def writefunction(self, content, initlines=None):
db_dal = os.path.abspath(os.path.join(request.folder, '..', '..', 'dummy2.db'))
sched_dal = DAL('sqlite://%s' % db_dal, folder=os.path.dirname(db_dal))
sched = Scheduler(sched_dal, max_empty_runs=15, migrate=False, heartbeat=1)
+def termination():
+ sched.terminate()
+ sched_dal.commit()
"""
with open(fdest, 'w') as q:
q.write(initlines)
@@ -699,10 +709,11 @@ def testNoReturn_and_Timeout_and_Progress(self):
timeout1 = s.queue_task('demo4', timeout=5)
timeout2 = s.queue_task('demo4')
progress = s.queue_task('demo6', sync_output=2)
+ termination = s.queue_task('termination')
self.db.commit()
self.writefunction(r"""
def demo3():
- time.sleep(15)
+ time.sleep(3)
print(1/0)
return None
@@ -712,7 +723,7 @@ def demo4():
return dict(a=1, b=2)
def demo5():
- time.sleep(15)
+ time.sleep(3)
print("I'm printing something")
rtn = dict(a=1, b=2)
@@ -758,6 +769,7 @@ def testDrift_and_env_and_immediate(self):
immediate = s.queue_task('demo1', ['a', 'b'], dict(c=1, d=2), immediate=True)
env = s.queue_task('demo7')
drift = s.queue_task('demo1', ['a', 'b'], dict(c=1, d=2), period=93, prevent_drift=True)
+ termination = s.queue_task('termination')
self.db.commit()
self.writefunction(r"""
def demo1(*args,**vars):
@@ -844,27 +856,41 @@ def demo8():
]
self.exec_asserts(res, 'FAILED_CONSECUTIVE')
- def testHugeResult(self):
+ def testRegressions(self):
s = Scheduler(self.db)
huge_result = s.queue_task('demo10', retry_failed=1, period=1)
+ issue_1485 = s.queue_task('issue_1485')
+ termination = s.queue_task('termination')
self.db.commit()
self.writefunction(r"""
def demo10():
res = 'a' * 99999
return dict(res=res)
+
+def issue_1485():
+ return response.render('issue_1485.html', dict(variable='abc'))
""")
+ self.writeview(r"""<span>{{=variable}}</span>""", 'issue_1485.html')
ret = self.exec_sched()
# process finished just fine
self.assertEqual(ret, 0)
# huge_result - checks
- task = s.task_status(huge_result.id, output=True)
+ task_huge = s.task_status(huge_result.id, output=True)
res = [
- ("task status completed", task.scheduler_task.status == 'COMPLETED'),
- ("task times_run is 1", task.scheduler_task.times_run == 1),
- ("result is the correct one", task.result == dict(res='a' * 99999))
+ ("task status completed", task_huge.scheduler_task.status == 'COMPLETED'),
+ ("task times_run is 1", task_huge.scheduler_task.times_run == 1),
+ ("result is the correct one", task_huge.result == dict(res='a' * 99999))
]
self.exec_asserts(res, 'HUGE_RESULT')
+ task_issue_1485 = s.task_status(issue_1485.id, output=True)
+ res = [
+ ("task status completed", task_issue_1485.scheduler_task.status == 'COMPLETED'),
+ ("task times_run is 1", task_issue_1485.scheduler_task.times_run == 1),
+ ("result is the correct one", task_issue_1485.result == '<span>abc</span>')
+ ]
+ self.exec_asserts(res, 'issue_1485')
+
if __name__ == '__main__':
unittest.main()
|
searx__searx-2391 | SUGGESTION: Contacting the instance's maintainer(s)
Hello, so I use searx, but I personally think that there should be any way to contact the maintainer(s) of a public instance (email for example). It is harder to trust this awesome service if there is no way to contact the maintainer(s).
| [
{
"content": "GIT_URL = 'https://github.com/searx/searx'\nGIT_BRANCH = 'master'\nISSUE_URL = 'https://github.com/searx/searx/issues'\nSEARX_URL = 'https://searx.me'\nDOCS_URL = 'https://searx.github.io/searx'\nPUBLIC_INSTANCES = 'https://searx.space'\n",
"path": "searx/brand.py"
}
] | [
{
"content": "GIT_URL = 'https://github.com/searx/searx'\nGIT_BRANCH = 'master'\nISSUE_URL = 'https://github.com/searx/searx/issues'\nSEARX_URL = 'https://searx.me'\nDOCS_URL = 'https://searx.github.io/searx'\nPUBLIC_INSTANCES = 'https://searx.space'\nCONTACT_URL = 'mailto:contact@example.com'\n",
"path": "... | diff --git a/Makefile b/Makefile
index 7704b337f6..8a4a38e1c4 100644
--- a/Makefile
+++ b/Makefile
@@ -6,6 +6,7 @@ export GIT_URL=https://github.com/searx/searx
export GIT_BRANCH=master
export SEARX_URL=https://searx.me
export DOCS_URL=https://searx.github.io/searx
+export CONTACT_URL=mailto:contact@example.com
# END Makefile setup
include utils/makefile.include
@@ -46,6 +47,7 @@ help-min:
@echo ' SEARX_URL = $(SEARX_URL)'
@echo ' GIT_URL = $(GIT_URL)'
@echo ' DOCS_URL = $(DOCS_URL)'
+ @echo ' CONTACT_URL = $(CONTACT_URL)'
@echo ''
@$(MAKE) -e -s make-help
@@ -124,6 +126,7 @@ buildenv:
$(Q)echo "SEARX_URL = '$(SEARX_URL)'" >> searx/brand.py
$(Q)echo "DOCS_URL = '$(DOCS_URL)'" >> searx/brand.py
$(Q)echo "PUBLIC_INSTANCES = 'https://searx.space'" >> searx/brand.py
+ $(Q)echo "CONTACT_URL = '$(CONTACT_URL)'" >> searx/brand.py
$(Q)echo "build utils/brand.env"
$(Q)echo "export GIT_URL='$(GIT_URL)'" > utils/brand.env
$(Q)echo "export GIT_BRANCH='$(GIT_BRANCH)'" >> utils/brand.env
@@ -131,6 +134,7 @@ buildenv:
$(Q)echo "export SEARX_URL='$(SEARX_URL)'" >> utils/brand.env
$(Q)echo "export DOCS_URL='$(DOCS_URL)'" >> utils/brand.env
$(Q)echo "export PUBLIC_INSTANCES='https://searx.space'" >> utils/brand.env
+ $(Q)echo "export CONTACT_URL='$(CONTACT_URL)'" >> utils/brand.env
# node / npm
diff --git a/searx/brand.py b/searx/brand.py
index d71c57db7c..0eaaf0be39 100644
--- a/searx/brand.py
+++ b/searx/brand.py
@@ -4,3 +4,4 @@
SEARX_URL = 'https://searx.me'
DOCS_URL = 'https://searx.github.io/searx'
PUBLIC_INSTANCES = 'https://searx.space'
+CONTACT_URL = 'mailto:contact@example.com'
diff --git a/searx/templates/oscar/base.html b/searx/templates/oscar/base.html
index 7b3d33f7a3..c8f390d3a0 100644
--- a/searx/templates/oscar/base.html
+++ b/searx/templates/oscar/base.html
@@ -88,7 +88,8 @@
{{ _('Powered by') }} <a href="{{ brand.DOCS_URL }}">searx</a> - {{ searx_version }} - {{ _('a privacy-respecting, hackable metasearch engine') }}<br/>
<a href="{{ brand.GIT_URL }}">{{ _('Source code') }}</a> |
<a href="{{ brand.ISSUE_URL }}">{{ _('Issue tracker') }}</a> |
- <a href="{{ brand.PUBLIC_INSTANCES }}">{{ _('Public instances') }}</a>
+ <a href="{{ brand.PUBLIC_INSTANCES }}">{{ _('Public instances') }}</a> |
+ <a href="{{ brand.CONTACT_URL }}">{{ _('Contact instance maintainer') }}</a>
</small>
</p>
</div>
diff --git a/searx/templates/simple/base.html b/searx/templates/simple/base.html
index 10fb424bf0..2318f6bfce 100644
--- a/searx/templates/simple/base.html
+++ b/searx/templates/simple/base.html
@@ -54,7 +54,8 @@
{{ _('Powered by') }} <a href="{{ url_for('about') }}">searx</a> - {{ searx_version }} — {{ _('a privacy-respecting, hackable metasearch engine') }}<br/>
<a href="{{ brand.GIT_URL }}">{{ _('Source code') }}</a> |
<a href="{{ brand.ISSUE_URL }}">{{ _('Issue tracker') }}</a> |
- <a href="{{ brand.PUBLIC_INSTANCES }}">{{ _('Public instances') }}</a>
+ <a href="{{ brand.PUBLIC_INSTANCES }}">{{ _('Public instances') }}</a> |
+ <a href="{{ brand.CONTACT_URL }}">{{ _('Contact instance maintainer') }}</a>
</p>
</footer>
<!--[if gte IE 9]>-->
diff --git a/utils/brand.env b/utils/brand.env
index 55244bd724..660160e43d 100644
--- a/utils/brand.env
+++ b/utils/brand.env
@@ -4,3 +4,4 @@ export ISSUE_URL='https://github.com/searx/searx/issues'
export SEARX_URL='https://searx.me'
export DOCS_URL='https://searx.github.io/searx'
export PUBLIC_INSTANCES='https://searx.space'
+export CONTACT_URL='mailto:contact@example.com'
|
pymodbus-dev__pymodbus-1395 | pip show pymodbus, misses information.
```
pymodbus) pymodbus % pip show pymodbus
Name: pymodbus
Version: 3.1.x
Summary: A fully featured modbus protocol stack in python
Home-page: https://github.com/pymodbus-dev/pymodbus/
Author: attr: pymodbus.__author__
Author-email:
License: BSD-3-Clause
Location: /Users/jan/repos/pymodbus
Editable project location: /Users/jan/repos/pymodbus
Requires: setuptools
Required-by:
```
Normally it gets the information from setup.cfg, but for some reason it does not work with "pip show".
| [
{
"content": "\"\"\"Pymodbus: Modbus Protocol Implementation.\n\nReleased under the the BSD license\n\"\"\"\n\nfrom logging import WARNING\n\nimport pymodbus.version as __version\nfrom pymodbus.logging import Log\n\n\n__version__ = __version.version.short()\n__author__ = \"Galen Collins\"\n__maintainer__ = \"dh... | [
{
"content": "\"\"\"Pymodbus: Modbus Protocol Implementation.\n\nReleased under the the BSD license\n\"\"\"\n\nfrom logging import WARNING\n\nimport pymodbus.version as __version\nfrom pymodbus.logging import Log\n\n\n__version__ = __version.version.short()\n__author__ = \"Galen Collins, Jan Iversen\"\n__mainta... | diff --git a/pymodbus/__init__.py b/pymodbus/__init__.py
index 89bc4b9df..2d61c1290 100644
--- a/pymodbus/__init__.py
+++ b/pymodbus/__init__.py
@@ -10,7 +10,7 @@
__version__ = __version.version.short()
-__author__ = "Galen Collins"
+__author__ = "Galen Collins, Jan Iversen"
__maintainer__ = "dhoomakethu, janiversen"
diff --git a/setup.cfg b/setup.cfg
index 90eeeb230..46645d00b 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -2,8 +2,8 @@
[metadata]
name = pymodbus
version = attr: pymodbus.__version__
-author = attr: pymodbus.__author__
-maintainer = attr: pymodbus.__maintainer__
+author = "Galen Collins, Jan Iversen"
+maintainer = "dhoomakethu, janiversen"
license = BSD-3-Clause
platforms = 'Linux', 'Mac OS X', 'Win'
description = A fully featured modbus protocol stack in python
|
piskvorky__gensim-919 | import gensim fails since updating to Xcode 7.3
I just updated my version of Xcode to 7.3. When I run `pip install --upgrade gensim` the process completed without any issues. However, when I try `import gensim` within the python shell the terminal barfs a bunch of C++ output with a block of execution errors that begins with:
`Exception: Compilation failed (return status=1): clang: error: unsupported option '-b mi2'. clang: error: unsupported option '-b mi'. clang: error: unknown argument: '-target-feature'. clang: error: unknown argument: '-sse4a'. clang: error: unknown argument: '-target-feature'. clang: error: unknown argument: '-target-feature'. clang: error: unknown argument: '-target-feature'. clang: error: unknown argument: '-tbm'. clang: error: unknown argument: '-target-feature'. clang: error: unknown argument: '-target-feature'....`
I think this has something to do with where gensim is looking for its header files, but I'm somewhat at a loss. Any help debugging would be greatly appreciated.
| [
{
"content": "\"\"\"\nThis package contains implementations of various streaming corpus I/O format.\n\"\"\"\n\n# bring corpus classes directly into package namespace, to save some typing\nfrom .indexedcorpus import IndexedCorpus # must appear before the other classes\n\nfrom .mmcorpus import MmCorpus\nfrom .ble... | [
{
"content": "\"\"\"\nThis package contains implementations of various streaming corpus I/O format.\n\"\"\"\n\n# bring corpus classes directly into package namespace, to save some typing\nfrom .indexedcorpus import IndexedCorpus # must appear before the other classes\n\nfrom .mmcorpus import MmCorpus\nfrom .ble... | diff --git a/gensim/corpora/__init__.py b/gensim/corpora/__init__.py
index a577cdc59d..a11a0df229 100644
--- a/gensim/corpora/__init__.py
+++ b/gensim/corpora/__init__.py
@@ -15,4 +15,3 @@
from .textcorpus import TextCorpus
from .ucicorpus import UciCorpus
from .malletcorpus import MalletCorpus
-from .sharded_corpus import ShardedCorpus
|
searx__searx-2358 | Bug in external command engine, resulting in engine crash.
**Version of Searx, commit number if you are using on master branch and stipulate if you forked Searx**
```
commit a0ddc27766271428d6f1f906c774cf6f5ccbf3fa (HEAD -> master)
Merge: 8c887382 cdceec1c
Author: Searx Service Account <searx@leandra.virtadpt.net>
Date: Sat Dec 5 17:21:41 2020 -0800
Merge branch 'master' of https://github.com/asciimoo/searx
```
**How did you install Searx?**
Installed using instructions from the official wiki, by hand.
**What happened?**
I went to the Searx page on my server and ran the query `!locate art bell mp3` after enabling the Locate search engine in `/opt/searx/searx/searx/settings.yml`.
**How To Reproduce**
Enable the Locate search engine in settings.yml thusly:
```
- name: locate
engine: command
command: ['locate', '--existing', '--ignore-case', '{{QUERY}}']
shortcut: locate
tokens: []
disabled: False
delimiter:
chars: ' '
keys: ['line']
```
Restart Searx.
Execute a `!locate` search while watching Searx's output, either by running it by hand or using `journalctl -xf` to tail the systemd journal.
**Expected behavior**
Searx runs the `locate` command on the server and returns the results.
**Screenshots & Logs**
Logs (datestamp, hostname, and PID elided to prevent having to scroll back and forth repeatedly):
```
: Traceback (most recent call last):
: File "/opt/searx/searx/searx/search.py", line 281, in search_one_offline_request_safe
: search_results = search_one_offline_request(engine, query, request_params)
: File "/opt/searx/searx/searx/search.py", line 274, in search_one_offline_request
: return engine.search(query, request_params)
: File "/opt/searx/searx/searx/engines/command.py", line 70, in search
: cmd = _get_command_to_run(query)
: File "/opt/searx/searx/searx/engines/command.py", line 83, in _get_command_to_run
: params = shlex_split(query.decode('utf-8'))
: AttributeError: 'str' object has no attribute 'decode'
```
**Additional context**
Searx is being run as a system service, through systemd, with a searx.sh shell script:
```
#!/usr/bin/env bash
SEARX=/opt/searx/searx
# Change to the Searx installation directory.
cd $SEARX
# Initialize the Python virtual environment.
. env/bin/activate
# Start up Searx.
#python searx/webapp.py
uwsgi --ini searx.ini
```
Searx is being run with uwsgi to improve responsiveness.
Other searches on this instance are not impacted in this manner.
| [
{
"content": "'''\nsearx is free software: you can redistribute it and/or modify\nit under the terms of the GNU Affero General Public License as published by\nthe Free Software Foundation, either version 3 of the License, or\n(at your option) any later version.\n\nsearx is distributed in the hope that it will b... | [
{
"content": "'''\nsearx is free software: you can redistribute it and/or modify\nit under the terms of the GNU Affero General Public License as published by\nthe Free Software Foundation, either version 3 of the License, or\n(at your option) any later version.\n\nsearx is distributed in the hope that it will b... | diff --git a/searx/engines/command.py b/searx/engines/command.py
index 08ee5da06a..0268d52eb7 100644
--- a/searx/engines/command.py
+++ b/searx/engines/command.py
@@ -80,7 +80,7 @@ def search(query, params):
def _get_command_to_run(query):
- params = shlex_split(query.decode('utf-8'))
+ params = shlex_split(query)
__check_query_params(params)
cmd = []
|
googleapis__google-cloud-python-1865 | Read the Docs build failing
https://readthedocs.org/projects/gcloud-python/builds/4108022/
https://readthedocs.org/projects/gcloud-python/builds/4108027/
| [
{
"content": "import os\nimport sys\n\nfrom setuptools import setup\nfrom setuptools import find_packages\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n\nwith open(os.path.join(here, 'README.rst')) as f:\n README = f.read()\n\n\nREQUIREMENTS = [\n 'httplib2 >= 0.9.1',\n 'googleapis-common-pro... | [
{
"content": "import os\nimport sys\n\nfrom setuptools import setup\nfrom setuptools import find_packages\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n\nwith open(os.path.join(here, 'README.rst')) as f:\n README = f.read()\n\n\nREQUIREMENTS = [\n 'httplib2 >= 0.9.1',\n 'googleapis-common-pro... | diff --git a/setup.py b/setup.py
index 977dd551523d..6757414aa0b0 100644
--- a/setup.py
+++ b/setup.py
@@ -25,7 +25,7 @@
'gax-google-pubsub-v1',
]
-if sys.version_info[:2] == (2, 7):
+if sys.version_info[:2] == (2, 7) and 'READTHEDOCS' not in os.environ:
REQUIREMENTS.extend(GRPC_EXTRAS)
setup(
|
pyro-ppl__pyro-3164 | PyTorch 2.0 compatibility: Explicit PyTorch 1.x check causing issues with packages that depend on PyTorch / pyro (e.g. BoTorch)
### Issue Description
The explicit check for PyTorch 1.x here (https://github.com/pyro-ppl/pyro/blob/dev/pyro/distributions/torch_patch.py#L10) is causing problems when another package has a dependency on PyTorch + Pyro, since PyTorch is now at 2.0. For example, it is causing BoTorch tests to fail here (https://github.com/pytorch/botorch/pull/1551).
Could this check be removed to allow for PyTorch 2.0?
### Environment
Mac OS 11.7.1
Python 3.10
PyTorch 2.0
Pyro 1.8.3
### Code Snippet
https://github.com/pytorch/botorch/actions/runs/3659534850/jobs/6185642011
| [
{
"content": "# Copyright (c) 2017-2019 Uber Technologies, Inc.\n# SPDX-License-Identifier: Apache-2.0\n\nimport functools\nimport math\nimport weakref\n\nimport torch\n\nassert torch.__version__.startswith(\"1.\")\n\n\ndef patch_dependency(target, root_module=torch):\n parts = target.split(\".\")\n asser... | [
{
"content": "# Copyright (c) 2017-2019 Uber Technologies, Inc.\n# SPDX-License-Identifier: Apache-2.0\n\nimport functools\nimport math\nimport weakref\n\nimport torch\n\n\ndef patch_dependency(target, root_module=torch):\n parts = target.split(\".\")\n assert parts[0] == root_module.__name__\n module ... | diff --git a/pyro/distributions/torch_patch.py b/pyro/distributions/torch_patch.py
index 55d98f6650..cad559b3cf 100644
--- a/pyro/distributions/torch_patch.py
+++ b/pyro/distributions/torch_patch.py
@@ -7,8 +7,6 @@
import torch
-assert torch.__version__.startswith("1.")
-
def patch_dependency(target, root_module=torch):
parts = target.split(".")
|
facebookresearch__nevergrad-705 | Wrong dates in changelog
All the dates at https://github.com/facebookresearch/nevergrad/blob/master/CHANGELOG.md shows 2019, but seems it should be 2020.
| [
{
"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\n# Configuration file for the Sphinx documentation builder.\n#\n# This file only contains a sel... | [
{
"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\n# Configuration file for the Sphinx documentation builder.\n#\n# This file only contains a sel... | diff --git a/CHANGELOG.md b/CHANGELOG.md
index a99352abc..c178866e0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,7 +11,7 @@
and [#691](https://github.com/facebookresearch/nevergrad/pull/691).
-## 0.4.1 (2019-05-07)
+## 0.4.1 (2020-05-07)
- `Archive` now stores the best corresponding candidate. This requires twice the memory compared to before the change. [#594](https://github.com/facebookresearch/nevergrad/pull/594)
- `Parameter` now holds a `loss: Optional[float]` attribute which is set and used by optimizers after the `tell` method.
@@ -23,7 +23,7 @@
- Started implementing more ML-oriented testbeds [#642](https://github.com/facebookresearch/nevergrad/pull/642)
-## v0.4.0 (2019-03-09)
+## v0.4.0 (2020-03-09)
### Breaking and important changes
@@ -54,7 +54,7 @@
if is automatically set to a sensible default [#536](https://github.com/facebookresearch/nevergrad/pull/536).
-## v0.3.2 (2019-02-05)
+## v0.3.2 (2020-02-05)
### Breaking changes (possibly for next version)
@@ -80,7 +80,7 @@
- `DE` algorithms comply with the new parametrization system and can be set to use parameter's recombination.
- Fixed array as bounds in `Array` parameters
-## v0.3.1 (2019-01-23)
+## v0.3.1 (2020-01-23)
**Note**: this is the first step to propagate the instrumentation/parametrization framework.
Learn more on the [Facebook user group](https://www.facebook.com/notes/nevergrad-users/moving-to-new-parametrization-upcoming-unstability-and-breaking-changes/639090766861215/).
@@ -100,7 +100,7 @@
- `PSO` now uses initialization by sampling the parametrization, instead of sampling all the real space. A new `WidePSO`
optimizer was created, using the previous initial sampling method [#467](https://github.com/facebookresearch/nevergrad/pull/467).
-## v0.3.0 (2019-01-08)
+## v0.3.0 (2020-01-08)
**Note**: this version is stable, but the following versions will include breaking changes which may cause instability. The aim of this changes will be to update the instrumentation system for more flexibility. See PR #323 and [Fb user group](https://www.facebook.com/groups/nevergradusers/) for more information.
diff --git a/docs/conf.py b/docs/conf.py
index 75f0bc26b..6f0fc8257 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -70,4 +70,5 @@
html_static_path = []
# -- Other --
-linkcheck_ignore = [r'https://gecco-2020.sigevo.org/*']
+linkcheck_ignore = [r'https://gecco-2020.sigevo.org/*',
+ r'https://arxiv.org/abs/*'] # Transient certificate error :(
|
Kinto__kinto-981 | JSON Merge Patch deserialization is broken
Merge-patch operations are broken. It looks like a deserialization problem, which may have been introduced with #790. Also, we should definitely include some view tests for this content-type. My bad :/
```
echo '{"data": {"aaa": "bbb"}}' | http patch localhost:8888/v1/buckets/b1 Content-Type:application/merge-patch+json -a aaa:
HTTP/1.1 400 Bad Request
Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff
Content-Length: 331
Content-Type: application/json; charset=UTF-8
Date: Sun, 11 Dec 2016 23:47:26 GMT
Server: waitress
{
"code": 400,
"details": [
{
"description": "\"{\"data\": {\"aaa\": \"bbb\"}}\n\" is not a mapping type: Does not implement dict-like functionality.",
"location": "body",
"name": ""
}
],
"errno": 107,
"error": "Invalid parameters",
"message": "\"{\"data\": {\"aaa\": \"bbb\"}}\n\" is not a mapping type: Does not implement dict-like functionality."
}
```
JSON Merge Patch deserialization is broken
Merge-patch operations are broken. It looks like a deserialization problem, which may have been introduced with #790. Also, we should definitely include some view tests for this content-type. My bad :/
```
echo '{"data": {"aaa": "bbb"}}' | http patch localhost:8888/v1/buckets/b1 Content-Type:application/merge-patch+json -a aaa:
HTTP/1.1 400 Bad Request
Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff
Content-Length: 331
Content-Type: application/json; charset=UTF-8
Date: Sun, 11 Dec 2016 23:47:26 GMT
Server: waitress
{
"code": 400,
"details": [
{
"description": "\"{\"data\": {\"aaa\": \"bbb\"}}\n\" is not a mapping type: Does not implement dict-like functionality.",
"location": "body",
"name": ""
}
],
"errno": 107,
"error": "Invalid parameters",
"message": "\"{\"data\": {\"aaa\": \"bbb\"}}\n\" is not a mapping type: Does not implement dict-like functionality."
}
```
JSON Merge Patch deserialization is broken
Merge-patch operations are broken. It looks like a deserialization problem, which may have been introduced with #790. Also, we should definitely include some view tests for this content-type. My bad :/
```
echo '{"data": {"aaa": "bbb"}}' | http patch localhost:8888/v1/buckets/b1 Content-Type:application/merge-patch+json -a aaa:
HTTP/1.1 400 Bad Request
Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff
Content-Length: 331
Content-Type: application/json; charset=UTF-8
Date: Sun, 11 Dec 2016 23:47:26 GMT
Server: waitress
{
"code": 400,
"details": [
{
"description": "\"{\"data\": {\"aaa\": \"bbb\"}}\n\" is not a mapping type: Does not implement dict-like functionality.",
"location": "body",
"name": ""
}
],
"errno": 107,
"error": "Invalid parameters",
"message": "\"{\"data\": {\"aaa\": \"bbb\"}}\n\" is not a mapping type: Does not implement dict-like functionality."
}
```
| [
{
"content": "import platform\nimport codecs\nimport os\nfrom setuptools import setup, find_packages\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n\ndef read_file(filename):\n \"\"\"Open a related file and return its content.\"\"\"\n with codecs.open(os.path.join(here, filename), encoding='utf-8... | [
{
"content": "import platform\nimport codecs\nimport os\nfrom setuptools import setup, find_packages\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n\ndef read_file(filename):\n \"\"\"Open a related file and return its content.\"\"\"\n with codecs.open(os.path.join(here, filename), encoding='utf-8... | diff --git a/requirements.txt b/requirements.txt
index 0b8b3bff7..aea371fa8 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
colander==1.3.1
colorama==0.3.7
contextlib2==0.5.4
-cornice==2.1.0
+cornice==2.3.0
enum34==1.1.6
functools32==3.2.3.post2
futures==3.0.5
diff --git a/setup.py b/setup.py
index e23f4d793..3f114a915 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def read_file(filename):
REQUIREMENTS = [
'colander',
'colorama',
- 'cornice >= 2.1',
+ 'cornice >= 2.3',
'jsonschema',
'jsonpatch',
'python-dateutil',
diff --git a/tests/core/resource/test_record.py b/tests/core/resource/test_record.py
index b57762ec6..a427ab993 100644
--- a/tests/core/resource/test_record.py
+++ b/tests/core/resource/test_record.py
@@ -228,9 +228,67 @@ def test_patch_record_returns_updated_fields(self):
self.assertEquals(self.stored['id'], self.result['id'])
self.assertEquals(self.result['position'], 10)
+ def test_record_timestamp_is_not_updated_if_none_for_missing_field(self):
+ self.resource.request.json = {'data': {'polo': None}}
+ result = self.resource.patch()['data']
+ self.assertEquals(self.result['last_modified'],
+ result['last_modified'])
+
+ def test_record_timestamp_is_not_updated_if_no_field_changed(self):
+ self.resource.request.json = {'data': {'position': 10}}
+ result = self.resource.patch()['data']
+ self.assertEquals(self.result['last_modified'],
+ result['last_modified'])
+
+ def test_collection_timestamp_is_not_updated_if_no_field_changed(self):
+ self.resource.request.json = {'data': {'position': 10}}
+ self.resource.patch()
+ self.resource = self.resource_class(request=self.get_request(),
+ context=self.get_context())
+ self.resource.collection_get()['data']
+ last_modified = int(self.last_response.headers['ETag'][1:-1])
+ self.assertEquals(self.result['last_modified'], last_modified)
+
+ def test_timestamp_is_not_updated_if_no_change_after_preprocessed(self):
+ with mock.patch.object(self.resource, 'process_record') as mocked:
+ mocked.return_value = self.result
+ self.resource.request.json = {'data': {'position': 20}}
+ result = self.resource.patch()['data']
+ self.assertEquals(self.result['last_modified'],
+ result['last_modified'])
+
+ def test_returns_changed_fields_among_provided_if_behaviour_is_diff(self):
+ self.resource.request.json = {'data': {'unread': True, 'position': 15}}
+ self.resource.request.headers['Response-Behavior'] = 'diff'
+ with mock.patch.object(self.resource.model, 'update_record',
+ return_value={'unread': True, 'position': 0}):
+ result = self.resource.patch()['data']
+ self.assertDictEqual(result, {'position': 0})
+
+ def test_returns_changed_fields_if_behaviour_is_light(self):
+ self.resource.request.json = {'data': {'unread': True, 'position': 15}}
+ self.resource.request.headers['Response-Behavior'] = 'light'
+ with mock.patch.object(self.resource.model, 'update_record',
+ return_value={'unread': True, 'position': 0}):
+ result = self.resource.patch()['data']
+ self.assertDictEqual(result, {'unread': True, 'position': 0})
+
+
+class MergePatchTest(BaseTest):
+ def setUp(self):
+ super(MergePatchTest, self).setUp()
+ self.stored = self.model.create_record({})
+ self.resource.record_id = self.stored['id']
+ self.headers = self.resource.request.headers
+ self.headers['Content-Type'] = 'application/merge-patch+json'
+
+ class ArticleSchema(ResourceSchema):
+ unread = colander.SchemaNode(colander.Boolean(), missing=colander.drop)
+ position = colander.SchemaNode(colander.Int(), missing=colander.drop)
+
+ self.resource.schema = ArticleSchema
+
def test_merge_patch_updates_attributes_recursively(self):
- header = self.resource.request.headers
- header['Content-Type'] = 'application/merge-patch+json'
self.resource.request.json = {'data': {'a': {'b': 'bbb',
'c': 'ccc'}}}
self.resource.patch()
@@ -240,8 +298,6 @@ def test_merge_patch_updates_attributes_recursively(self):
self.assertEqual(result['a']['b'], 'aaa')
def test_merge_patch_removes_attribute_if_none(self):
- header = self.resource.request.headers
- header['Content-Type'] = 'application/merge-patch+json'
self.resource.request.json = {'data': {'field': 'aaa'}}
self.resource.patch()
self.resource.request.json = {'data': {'field': None}}
@@ -251,8 +307,6 @@ def test_merge_patch_removes_attribute_if_none(self):
self.assertNotIn('field', result)
def test_merge_patch_removes_attributes_recursively_if_none(self):
- header = self.resource.request.headers
- header['Content-Type'] = 'application/merge-patch+json'
self.resource.request.json = {'data': {'a': {'b': 'aaa'}}}
self.resource.patch()
self.resource.request.json = {'data': {'a': {'b': None}}}
@@ -266,8 +320,6 @@ def test_merge_patch_removes_attributes_recursively_if_none(self):
self.assertNotIn('cc', result['aa']['bb'])
def test_merge_patch_doesnt_remove_attribute_if_false(self):
- header = self.resource.request.headers
- header['Content-Type'] = 'application/merge-patch+json'
self.resource.request.json = {'data': {'field': 0}}
result = self.resource.patch()['data']
self.assertIn('field', result)
@@ -279,8 +331,7 @@ def test_merge_patch_doesnt_remove_attribute_if_false(self):
self.assertIn('field', result)
def test_patch_doesnt_remove_attribute_if_not_merge_header(self):
- header = self.resource.request.headers
- header['Content-Type'] = 'application/json'
+ self.headers['Content-Type'] = 'application/json'
self.resource.request.json = {'data': {'field': 'aaa'}}
self.resource.patch()
self.resource.request.json = {'data': {'field': None}}
@@ -290,63 +341,17 @@ def test_patch_doesnt_remove_attribute_if_not_merge_header(self):
self.assertIn('field', result)
def test_merge_patch_doesnt_remove_previously_inserted_nones(self):
- header = self.resource.request.headers
- header['Content-Type'] = 'application/json'
+ self.headers['Content-Type'] = 'application/json'
self.resource.request.json = {'data': {'field': 'aaa'}}
result = self.resource.patch()['data']
self.resource.request.json = {'data': {'field': None}}
result = self.resource.patch()['data']
self.assertIn('field', result)
- header['Content-Type'] = 'application/merge-patch+json'
+ self.headers['Content-Type'] = 'application/merge-patch+json'
self.resource.request.json = {'data': {'position': 10}}
result = self.resource.patch()['data']
self.assertIn('field', result)
- def test_record_timestamp_is_not_updated_if_none_for_missing_field(self):
- self.resource.request.json = {'data': {'polo': None}}
- result = self.resource.patch()['data']
- self.assertEquals(self.result['last_modified'],
- result['last_modified'])
-
- def test_record_timestamp_is_not_updated_if_no_field_changed(self):
- self.resource.request.json = {'data': {'position': 10}}
- result = self.resource.patch()['data']
- self.assertEquals(self.result['last_modified'],
- result['last_modified'])
-
- def test_collection_timestamp_is_not_updated_if_no_field_changed(self):
- self.resource.request.json = {'data': {'position': 10}}
- self.resource.patch()
- self.resource = self.resource_class(request=self.get_request(),
- context=self.get_context())
- self.resource.collection_get()['data']
- last_modified = int(self.last_response.headers['ETag'][1:-1])
- self.assertEquals(self.result['last_modified'], last_modified)
-
- def test_timestamp_is_not_updated_if_no_change_after_preprocessed(self):
- with mock.patch.object(self.resource, 'process_record') as mocked:
- mocked.return_value = self.result
- self.resource.request.json = {'data': {'position': 20}}
- result = self.resource.patch()['data']
- self.assertEquals(self.result['last_modified'],
- result['last_modified'])
-
- def test_returns_changed_fields_among_provided_if_behaviour_is_diff(self):
- self.resource.request.json = {'data': {'unread': True, 'position': 15}}
- self.resource.request.headers['Response-Behavior'] = 'diff'
- with mock.patch.object(self.resource.model, 'update_record',
- return_value={'unread': True, 'position': 0}):
- result = self.resource.patch()['data']
- self.assertDictEqual(result, {'position': 0})
-
- def test_returns_changed_fields_if_behaviour_is_light(self):
- self.resource.request.json = {'data': {'unread': True, 'position': 15}}
- self.resource.request.headers['Response-Behavior'] = 'light'
- with mock.patch.object(self.resource.model, 'update_record',
- return_value={'unread': True, 'position': 0}):
- result = self.resource.patch()['data']
- self.assertDictEqual(result, {'unread': True, 'position': 0})
-
class JsonPatchTest(BaseTest):
def setUp(self):
diff --git a/tests/test_views_records.py b/tests/test_views_records.py
index 1e1787042..d1559a11c 100644
--- a/tests/test_views_records.py
+++ b/tests/test_views_records.py
@@ -301,6 +301,49 @@ def test_records_can_be_created_after_deletion(self):
headers=headers, status=201)
+class RecordsViewMergeTest(BaseWebTest, unittest.TestCase):
+
+ collection_url = '/buckets/beers/collections/barley/records'
+ _record_url = '/buckets/beers/collections/barley/records/%s'
+
+ def setUp(self):
+ super(RecordsViewMergeTest, self).setUp()
+ self.app.put_json('/buckets/beers', MINIMALIST_BUCKET,
+ headers=self.headers)
+ self.app.put_json('/buckets/beers/collections/barley',
+ MINIMALIST_COLLECTION,
+ headers=self.headers)
+ record = MINIMALIST_RECORD.copy()
+ record['data'] = {}
+ record['data']['grain'] = {'one': 1}
+ resp = self.app.post_json(self.collection_url,
+ record,
+ headers=self.headers)
+ self.record = resp.json['data']
+ self.record_url = self._record_url % self.record['id']
+
+ def test_merge_patch(self):
+ headers = self.headers.copy()
+ headers['Content-Type'] = 'application/merge-patch+json'
+ json = {'data': {'grain': {'two': 2}}}
+ resp = self.app.patch_json(self.record_url,
+ json,
+ headers=headers,
+ status=200)
+ self.assertEquals(resp.json['data']['grain']['one'], 1)
+ self.assertEquals(resp.json['data']['grain']['two'], 2)
+
+ def test_merge_patch_remove_nones(self):
+ headers = self.headers.copy()
+ headers['Content-Type'] = 'application/merge-patch+json'
+ json = {'data': {'grain': {'one': None}}}
+ resp = self.app.patch_json(self.record_url,
+ json,
+ headers=headers,
+ status=200)
+ self.assertNotIn('one', resp.json['data']['grain'])
+
+
class RecordsViewPatchTest(BaseWebTest, unittest.TestCase):
collection_url = '/buckets/beers/collections/barley/records'
|
freedomofpress__securedrop-5236 | qa_loader.py uses a fixed random seed every run
## Description
Always using the same seed makes it impossible to run `qa_loader.py` multiple times with the same database, as supposedly random values aren't, causing unique constraint violations.
## Steps to Reproduce
- Run the dev server with `make dev`
- Start a shell in the container with `docker exec -it securedrop-dev-0 bash`
- In that shell, run `./qa_loader --journalist-count 1 --source-count 1`
- Run the same command a second time.
## Expected Behavior
That you could keep adding random journalists and sources to the database.
## Actual Behavior
You get `sqlalchemy.exc.IntegrityError: (sqlite3.IntegrityError) UNIQUE constraint failed: journalists.username` because [`random.seed` is always called with the same value](https://github.com/freedomofpress/securedrop/blob/ec2220c3c2b9120d029b616d3a07647b175bc6ab/securedrop/qa_loader.py#L22).
| [
{
"content": "#!/opt/venvs/securedrop-app-code/bin/python\n# -*- coding: utf-8 -*-\n\nimport os\nimport random\nimport string\nimport sys\nfrom argparse import ArgumentParser\nfrom datetime import datetime\nfrom itertools import cycle\nfrom os import path\n\nfrom flask import current_app\n\nfrom crypto_util imp... | [
{
"content": "#!/opt/venvs/securedrop-app-code/bin/python\n# -*- coding: utf-8 -*-\n\nimport os\nimport random\nimport string\nimport sys\nfrom argparse import ArgumentParser\nfrom datetime import datetime\nfrom itertools import cycle\nfrom os import path\n\nfrom flask import current_app\n\nfrom crypto_util imp... | diff --git a/securedrop/qa_loader.py b/securedrop/qa_loader.py
index 8f76be13e1..c4fa054da9 100755
--- a/securedrop/qa_loader.py
+++ b/securedrop/qa_loader.py
@@ -19,9 +19,6 @@
from sdconfig import config as sdconfig
-random.seed("~(=^–^)") # mrow?
-
-
def random_bool():
return bool(random.getrandbits(1))
|
apache__airflow-14978 | Bump supported mysqlclient to <1.5
**Description**
version 1.4.X introduced in Jan 2019
we should support it if we can.
**Use case / motivation**
pin of <1.4 was done in https://github.com/apache/airflow/pull/4558 due to lack of Python 2 compatibility. Since Master doesn't support Python 2 anymore there is no need for that restriction
**Related Issues**
Moved from https://issues.apache.org/jira/browse/AIRFLOW-4810
I tried to fix it in https://github.com/apache/airflow/pull/5430 but didn't get help with the tests so if any one wants to pick it up be my guest.
| [
{
"content": "#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (th... | [
{
"content": "#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (th... | diff --git a/docs/apache-airflow-providers-mysql/index.rst b/docs/apache-airflow-providers-mysql/index.rst
index 12a21edab4f68..ad46fd87e928b 100644
--- a/docs/apache-airflow-providers-mysql/index.rst
+++ b/docs/apache-airflow-providers-mysql/index.rst
@@ -93,7 +93,7 @@ PIP requirements
PIP package Version required
========================== ======================
``mysql-connector-python`` ``>=8.0.11, <=8.0.22``
-``mysqlclient`` ``>=1.3.6,<1.4``
+``mysqlclient`` ``>=1.3.6,<3``
========================== ======================
Cross provider package dependencies
diff --git a/setup.py b/setup.py
index 332d43cc63b3c..79c37ec00deac 100644
--- a/setup.py
+++ b/setup.py
@@ -365,7 +365,7 @@ def get_sphinx_theme_version() -> str:
]
mysql = [
'mysql-connector-python>=8.0.11, <=8.0.22',
- 'mysqlclient>=1.3.6,<1.4',
+ 'mysqlclient>=1.3.6,<3',
]
neo4j = ['neo4j>=4.2.1']
odbc = [
|
ManimCommunity__manim-3166 | Not all arrow tips are accessible
## Description of bug / unexpected behavior
<!-- Add a clear and concise description of the problem you encountered. -->
The [manim.mobject.geometry.tips](https://docs.manim.community/en/stable/_modules/manim/mobject/geometry/tips.html#ArrowTriangleFilledTip) file has presents of some arrow tips to use. The list `__all__` contains:
```py
__all__ = [
"ArrowTip",
"ArrowCircleFilledTip",
"ArrowCircleTip",
"ArrowSquareTip",
"ArrowSquareFilledTip",
]
```
## Expected behavior
<!-- Add a clear and concise description of what you expected to happen. -->
Instead, it should have:
```py
__all__ = [
"ArrowTip",
"ArrowCircleFilledTip",
"ArrowCircleTip",
"ArrowSquareTip",
"ArrowSquareFilledTip"
"ArrowTriangleTip", # added
"ArrowTriangleFilledTip", # added
]
```
## How to reproduce the issue
<!-- Provide a piece of code illustrating the undesired behavior. -->
<details><summary>Code for reproducing the problem</summary>
```py
class Test(Scene):
def construct(self):
my_line = Line()
my_line.add_tip(ArrowTriangleFilledTip(fill_color=WHITE))
self.add(my_line)
```
</details>
## Additional media files
<!-- Paste in the files manim produced on rendering the code above. -->
None
<!-- Insert screenshots here (only when absolutely necessary, we prefer copy/pasted output!) -->
</details>
## System specifications
<details><summary>System Details</summary>
- OS: macOS 13.0.1 (Ventura)
- RAM: 8GB
- Python version: Python 3.10.9
- Installed modules: manim 0.17.2
| [
{
"content": "r\"\"\"A collection of tip mobjects for use with :class:`~.TipableVMobject`.\"\"\"\n\nfrom __future__ import annotations\n\n__all__ = [\n \"ArrowTip\",\n \"ArrowCircleFilledTip\",\n \"ArrowCircleTip\",\n \"ArrowSquareTip\",\n \"ArrowSquareFilledTip\",\n]\n\nimport numpy as np\n\nfro... | [
{
"content": "r\"\"\"A collection of tip mobjects for use with :class:`~.TipableVMobject`.\"\"\"\n\nfrom __future__ import annotations\n\n__all__ = [\n \"ArrowTip\",\n \"ArrowCircleFilledTip\",\n \"ArrowCircleTip\",\n \"ArrowSquareTip\",\n \"ArrowSquareFilledTip\",\n \"ArrowTriangleTip\",\n ... | diff --git a/manim/mobject/geometry/tips.py b/manim/mobject/geometry/tips.py
index b25a5a5380..0c82697fdc 100644
--- a/manim/mobject/geometry/tips.py
+++ b/manim/mobject/geometry/tips.py
@@ -8,6 +8,8 @@
"ArrowCircleTip",
"ArrowSquareTip",
"ArrowSquareFilledTip",
+ "ArrowTriangleTip",
+ "ArrowTriangleFilledTip",
]
import numpy as np
|
NVIDIA__apex-564 | RuntimeError: "GeluCUDAKernelImpl" not implemented for 'Half'
PyTorch 1.2 introduced the `gelu` activation function. Unfortunately, this leads to terminal errors when using with AMP.
Trace (`self.activation` is `gelu`):
```
Traceback (most recent call last):
File "predict.py", line 282, in <module>
predictor.predict()
File "predict.py", line 74, in predict
fig = trainer.train()
File "/home/bram/Python/projects/transformer-classifiers/transformer_classifiers/TransformerTrainer.py", line 232, in train
self._process('train', epoch)
File "/home/bram/Python/projects/transformer-classifiers/transformer_classifiers/TransformerTrainer.py", line 124, in _process
preds = self.model(input_ids, attention_mask=input_mask)
File "/home/bram/.local/share/virtualenvs/transformer-classifiers-x27iJBv7/lib/python3.7/site-packages/torch/nn/modules/module.py", line 541, in __call__
result = self.forward(*input, **kwargs)
File "/home/bram/.local/share/virtualenvs/transformer-classifiers-x27iJBv7/lib/python3.7/site-packages/torch/nn/parallel/distributed.py", line 442, in forward
output = self.module(*inputs[0], **kwargs[0])
File "/home/bram/.local/share/virtualenvs/transformer-classifiers-x27iJBv7/lib/python3.7/site-packages/torch/nn/modules/module.py", line 541, in __call__
result = self.forward(*input, **kwargs)
File "/home/bram/Python/projects/transformer-classifiers/transformer_classifiers/models.py", line 140, in forward
cls_output = self.activation(cls_output)
File "/home/bram/.local/share/virtualenvs/transformer-classifiers-x27iJBv7/lib/python3.7/site-packages/torch/nn/functional.py", line 1126, in gelu
return torch._C._nn.gelu(input)
RuntimeError: "GeluCUDAKernelImpl" not implemented for 'Half'
```
| [
{
"content": "\n# TODO: think about the following two. They do weird things.\n# - torch.nn.utils.clip_grad (but it should always be fp32 anyway)\n# - torch.nn.utils.weight_norm\n\n# Notes:\n# F.instance_norm uses batch_norm internally. Which correctly handles\n# fp16 in/out with fp32 weights. So we shouldn't ... | [
{
"content": "\n# TODO: think about the following two. They do weird things.\n# - torch.nn.utils.clip_grad (but it should always be fp32 anyway)\n# - torch.nn.utils.weight_norm\n\n# Notes:\n# F.instance_norm uses batch_norm internally. Which correctly handles\n# fp16 in/out with fp32 weights. So we shouldn't ... | diff --git a/apex/amp/lists/functional_overrides.py b/apex/amp/lists/functional_overrides.py
index 3ea6a4918..d1dfcd0ea 100644
--- a/apex/amp/lists/functional_overrides.py
+++ b/apex/amp/lists/functional_overrides.py
@@ -37,7 +37,8 @@
'softmin',
'log_softmax',
'softmax',
-
+ 'gelu',
+
# Normalization
'layer_norm',
'group_norm',
|
vyperlang__vyper-3207 | variables named `UNREACHABLE` can be shadowed by `raise`and `assert` when used with `UNREACHABLE`
### Version Information
* vyper Version (output of `vyper --version`): 0.3.8+commit.6020b8bb
* OS: OSX
* Python Version (output of `python --version`): 3.8.0
### What's your issue about?
`UNREACHABLE` is not a reserved keyword. For someone who is not familiar with the custom semantic of `UNREACHABLE` when used as a reason string for a `raise` or `assert` statement, the fact that in this context, any previously defined variable named `UNREACHABLE` will be shadowed by this new semantic might be confusing.
To illustrate this, in the following contract, a call to `bar` will revert with `invalid opcode` and not `this is unreachable` as one could imagine.
```Vyper
@external
def bar():
UNREACHABLE: String[20] = "this is unreachable"
x: uint256 = 3
assert 2>x, UNREACHABLE
```
### How can it be fixed?
An option could be to make `UNREACHABLE` a reserved keyword but some other alternative less restrictive might be better.
| [
{
"content": "import contextlib\nimport re\n\nfrom vyper.evm.opcodes import OPCODES\nfrom vyper.exceptions import (\n CompilerPanic,\n NamespaceCollision,\n StructureException,\n UndeclaredDefinition,\n)\nfrom vyper.semantics.analysis.levenshtein_utils import get_levenshtein_error_suggestions\n\n\nc... | [
{
"content": "import contextlib\nimport re\n\nfrom vyper.evm.opcodes import OPCODES\nfrom vyper.exceptions import (\n CompilerPanic,\n NamespaceCollision,\n StructureException,\n UndeclaredDefinition,\n)\nfrom vyper.semantics.analysis.levenshtein_utils import get_levenshtein_error_suggestions\n\n\nc... | diff --git a/vyper/semantics/namespace.py b/vyper/semantics/namespace.py
index 5aa530ea01..752ef7ad96 100644
--- a/vyper/semantics/namespace.py
+++ b/vyper/semantics/namespace.py
@@ -158,6 +158,7 @@ def validate_identifier(attr):
"assert",
"raise",
"throw",
+ "unreachable",
# special functions (no name mangling)
"init",
"_init_",
|
django__channels-1223 | InMemoryChannelLayer: unused variable local_poll_interval
https://github.com/django/channels/blob/5feecdb6a0df720651851f377aee3587d07eceef/channels/layers.py#L199
| [
{
"content": "from __future__ import unicode_literals\n\nimport asyncio\nimport fnmatch\nimport random\nimport re\nimport string\nimport time\nfrom copy import deepcopy\n\nfrom django.conf import settings\nfrom django.core.signals import setting_changed\nfrom django.utils.module_loading import import_string\n\n... | [
{
"content": "from __future__ import unicode_literals\n\nimport asyncio\nimport fnmatch\nimport random\nimport re\nimport string\nimport time\nfrom copy import deepcopy\n\nfrom django.conf import settings\nfrom django.core.signals import setting_changed\nfrom django.utils.module_loading import import_string\n\n... | diff --git a/channels/layers.py b/channels/layers.py
index d8d4ff591..5223d69b1 100644
--- a/channels/layers.py
+++ b/channels/layers.py
@@ -196,8 +196,6 @@ class InMemoryChannelLayer(BaseChannelLayer):
In-memory channel layer implementation
"""
- local_poll_interval = 0.01
-
def __init__(
self,
expiry=60,
|
kivy__python-for-android-2436 | "diff" files are ignored during "pip install ."
in `setup.py` the "diff" is not listed:
https://github.com/kivy/python-for-android/blob/develop/setup.py
```python
package_data = {'': ['*.tmpl',
'*.patch', ], }
```
and therefore this `diff` patch:
https://github.com/kivy/python-for-android/blob/develop/pythonforandroid/recipes/python3/patches/reproducible-buildinfo.diff
is not installed during `pip` invocation:
```sh
cd /tmp
git clone --depth 1 https://github.com/kivy/python-for-android.git
cd python-for-android
pip install .
```
| [
{
"content": "\nimport glob\nfrom io import open # for open(..,encoding=...) parameter in python 2\nfrom os import walk\nfrom os.path import join, dirname, sep\nimport re\nfrom setuptools import setup, find_packages\n\n# NOTE: All package data should also be set in MANIFEST.in\n\npackages = find_packages()\n\n... | [
{
"content": "\nimport glob\nfrom io import open # for open(..,encoding=...) parameter in python 2\nfrom os import walk\nfrom os.path import join, dirname, sep\nimport re\nfrom setuptools import setup, find_packages\n\n# NOTE: All package data should also be set in MANIFEST.in\n\npackages = find_packages()\n\n... | diff --git a/setup.py b/setup.py
index 25e4a0d041..2d056124a1 100644
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,8 @@
packages = find_packages()
package_data = {'': ['*.tmpl',
- '*.patch', ], }
+ '*.patch',
+ '*.diff', ], }
data_files = []
|
ranaroussi__yfinance-295 | Deprecation warning due to invalid escape sequences
Deprecation warnings are raised due to invalid escape sequences. This can be fixed by using raw strings or escaping the literals. pyupgrade also helps in automatic conversion : https://github.com/asottile/pyupgrade/
```
find . -iname '*.py' | grep -Ev 'test.py' | xargs -P4 -I{} python3.8 -Wall -m py_compile {}
./yfinance/utils.py:67: DeprecationWarning: invalid escape sequence \g
return [_re.sub("([a-z])([A-Z])", "\g<1> \g<2>", i).title() for i in o]
```
| [
{
"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Yahoo! Finance market data downloader (+fix for Pandas Datareader)\n# https://github.com/ranaroussi/yfinance\n#\n# Copyright 2017-2019 Ran Aroussi\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this fi... | [
{
"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Yahoo! Finance market data downloader (+fix for Pandas Datareader)\n# https://github.com/ranaroussi/yfinance\n#\n# Copyright 2017-2019 Ran Aroussi\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this fi... | diff --git a/yfinance/utils.py b/yfinance/utils.py
index 53fdc0808..79ee9cfc5 100644
--- a/yfinance/utils.py
+++ b/yfinance/utils.py
@@ -64,7 +64,7 @@ def get_json(url, proxy=None):
def camel2title(o):
- return [_re.sub("([a-z])([A-Z])", "\g<1> \g<2>", i).title() for i in o]
+ return [_re.sub("([a-z])([A-Z])", r"\g<1> \g<2>", i).title() for i in o]
def auto_adjust(data):
|
google__turbinia-1070 | Missing sys module import in logger.py
Logger module is missing an import statement for 'sys'
| [
{
"content": "# -*- coding: utf-8 -*-\n# Copyright 2017 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# U... | [
{
"content": "# -*- coding: utf-8 -*-\n# Copyright 2017 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# U... | diff --git a/turbinia/config/logger.py b/turbinia/config/logger.py
index 2d10830bf..f15f39756 100644
--- a/turbinia/config/logger.py
+++ b/turbinia/config/logger.py
@@ -20,6 +20,7 @@
import warnings
import logging.handlers
import os
+import sys
from turbinia import config
from turbinia import TurbiniaException
|
scikit-image__scikit-image-1124 | NameError on del version when init has ImportError
In `__init__.py`, `del version` gives `NameError` when `ImportError` happens.
```
try:
from .version import version as __version__
except ImportError:
__version__ = "unbuilt-dev"
del version
```
should be
```
try:
from .version import version as __version__
except ImportError:
__version__ = "unbuilt-dev"
else:
del version
```
| [
{
"content": "\"\"\"Image Processing SciKit (Toolbox for SciPy)\n\n``scikit-image`` (a.k.a. ``skimage``) is a collection of algorithms for image\nprocessing and computer vision.\n\nThe main package of ``skimage`` only provides a few utilities for converting\nbetween image data types; for most features, you need... | [
{
"content": "\"\"\"Image Processing SciKit (Toolbox for SciPy)\n\n``scikit-image`` (a.k.a. ``skimage``) is a collection of algorithms for image\nprocessing and computer vision.\n\nThe main package of ``skimage`` only provides a few utilities for converting\nbetween image data types; for most features, you need... | diff --git a/skimage/__init__.py b/skimage/__init__.py
index 59c80ed2c15..d1f98ff7484 100644
--- a/skimage/__init__.py
+++ b/skimage/__init__.py
@@ -69,7 +69,8 @@
from .version import version as __version__
except ImportError:
__version__ = "unbuilt-dev"
-del version
+else:
+ del version
try:
|
ethereum__web3.py-3083 | RuntimeError: release unlocked lock
* Version: 6.8.0
* Python: 3.11.1
* OS: linux
* `pip freeze` output
```
pip freeze 4167ms
aiofiles==23.1.0
aiohttp==3.8.5
aiosignal==1.3.1
alembic==1.11.3
async-timeout==4.0.2
asyncpg==0.28.0
attrs==23.1.0
base58==2.1.1
bitarray==2.7.5
certifi==2023.5.7
charset-normalizer==3.1.0
cytoolz==0.12.1
ecs-logging==2.1.0
eth-abi==4.1.0
eth-account==0.9.0
eth-hash==0.5.2
eth-keyfile==0.6.1
eth-keys==0.4.0
eth-rlp==0.3.0
eth-typing==3.4.0
eth-utils==2.2.0
frozenlist==1.3.3
grpcio==1.57.0
grpcio-tools==1.57.0
hexbytes==0.3.1
html5tagger==1.3.0
httptools==0.5.0
idna==3.4
jsonschema==4.17.3
lru-dict==1.2.0
Mako==1.2.4
MarkupSafe==2.1.2
multidict==6.0.4
numpy==1.25.2
parsimonious==0.9.0
prometheus-client==0.17.1
protobuf==4.23.0
pycryptodome==3.18.0
pydantic==1.10.12
pyrsistent==0.19.3
pyunormalize==15.0.0
PyYAML==6.0
redis==5.0.0
regex==2023.6.3
requests==2.31.0
rlp==3.0.0
sanic==23.6.0
sanic-ext==23.6.0
sanic-routing==23.6.0
SQLAlchemy==2.0.20
toolz==0.12.0
tracerite==1.1.0
typing_extensions==4.5.0
ujson==5.7.0
urllib3==2.0.2
uvloop==0.17.0
web3==6.8.0
websockets==11.0.3
yarl==1.9.2
```
### What was wrong?
* The code which produced the error
```py
provider = AsyncHTTPProvider(request.app.config.get("ETHEREUM_MAINNET_URL"))
w3 = AsyncWeb3(provider)
contract = w3.eth.contract(
address=MAINNET_TOKEN_ADDRESS_DETECTION,
abi=single_call_balance_checker_abi,
)
address_keys = list(TOKEN_METADATA_MAP.keys())
(native_balance, balance_values) = await asyncio.gather(
w3.eth.get_balance(to_checksum_address(address)),
contract.functions.balances(
[to_checksum_address(address)],
address_keys,
).call(),
)
```
* The full output of the error
```py
File "handle_request", line 97, in handle_request
File "/app/data_service/ethereum/views/balances.py", line 54, in get_balances
(native_balance, balance_values) = await asyncio.gather(
^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/eth/async_eth.py", line 435, in get_balance
return await self._get_balance(account, block_identifier)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/module.py", line 114, in caller
result = await async_w3.manager.coro_request(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/manager.py", line 264, in coro_request
response = await self._coro_make_request(method, params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/manager.py", line 199, in _coro_make_request
return await request_func(method, params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/middleware/gas_price_strategy.py", line 126, in middleware
return await make_request(method, params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/middleware/names.py", line 139, in middleware
return await make_request(method, params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/middleware/attrdict.py", line 69, in middleware
response = await make_request(method, params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/middleware/formatting.py", line 165, in middleware
response = await make_request(method, params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/middleware/buffered_gas_estimate.py", line 58, in middleware
return await make_request(method, params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/middleware/exception_retry_request.py", line 151, in middleware
return await make_request(method, params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/providers/async_rpc.py", line 91, in make_request
raw_response = await async_make_post_request(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/_utils/request.py", line 239, in async_make_post_request
response = await async_get_response_from_post_request(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/_utils/request.py", line 231, in async_get_response_from_post_request
session = await async_cache_and_return_session(endpoint_uri)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/_utils/request.py", line 141, in async_cache_and_return_session
async with async_lock(_async_session_pool, _async_session_cache_lock):
File "/usr/local/lib/python3.11/contextlib.py", line 204, in __aenter__
return await anext(self.gen)
^^^^^^^^^^^^^^^^^^^^^
File "/app/.venv/lib/python3.11/site-packages/web3/_utils/async_caching.py", line 21, in async_lock
lock.release()
```
* What type of node you were connecting to.
AsyncHTTPProvider (Infura)
### How can it be fixed?
I tried to reproduce this error locally, but it only occurs in production.
| [
{
"content": "import asyncio\nfrom concurrent.futures import (\n ThreadPoolExecutor,\n)\nimport contextlib\nimport threading\nfrom typing import (\n AsyncGenerator,\n)\n\n\n@contextlib.asynccontextmanager\nasync def async_lock(\n thread_pool: ThreadPoolExecutor, lock: threading.Lock\n) -> AsyncGenerato... | [
{
"content": "import asyncio\nfrom concurrent.futures import (\n ThreadPoolExecutor,\n)\nimport contextlib\nimport threading\nfrom typing import (\n AsyncGenerator,\n)\n\n\n@contextlib.asynccontextmanager\nasync def async_lock(\n thread_pool: ThreadPoolExecutor, lock: threading.Lock\n) -> AsyncGenerato... | diff --git a/newsfragments/3083.bugfix.rst b/newsfragments/3083.bugfix.rst
new file mode 100644
index 0000000000..4845959fd0
--- /dev/null
+++ b/newsfragments/3083.bugfix.rst
@@ -0,0 +1 @@
+Only release ``async_lock`` if it's locked to begin with.
diff --git a/web3/_utils/async_caching.py b/web3/_utils/async_caching.py
index 4997a162f2..42a7e1aaa0 100644
--- a/web3/_utils/async_caching.py
+++ b/web3/_utils/async_caching.py
@@ -18,4 +18,5 @@ async def async_lock(
await loop.run_in_executor(thread_pool, lock.acquire)
yield
finally:
- lock.release()
+ if lock.locked():
+ lock.release()
|
edgedb__edgedb-1057 | Bad pretty printing of datetime
Here is what I get:
```
edgedb> SELECT <datetime>'2020-01-08T17:03:06.026178+00:00';
{<local_date>'2020-01-08T17:03:06.026178+00:00'}
```
Well the `datetime` in python is a subclass of `date` but `singledispatch` is supposed to handle that well. Do we have a patched singledispatch now?
| [
{
"content": "#\n# This source file is part of the EdgeDB open source project.\n#\n# Copyright 2019-present MagicStack Inc. and the EdgeDB authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy... | [
{
"content": "#\n# This source file is part of the EdgeDB open source project.\n#\n# Copyright 2019-present MagicStack Inc. and the EdgeDB authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy... | diff --git a/edb/repl/render_binary.py b/edb/repl/render_binary.py
index 134f4fd2d94..54a61f5feb4 100644
--- a/edb/repl/render_binary.py
+++ b/edb/repl/render_binary.py
@@ -275,6 +275,7 @@ def _empty(
buf.write('{}', style.bracket)
+@walk.register
def _datetime(
o: datetime.datetime,
repl_ctx: context.ReplContext,
|
sopel-irc__sopel-1044 | [announce] Send confirmation to caller after all channels announced
When Sopel is in many channels, announces are likely to be rate-limited. This makes it hard to know, for example, when it's safe to shut down the bot if announce is being used to broadcast an upgrade notice.
It's an easy fix, and I'll open a PR for it tomorrow if there are no objections.
I am as-yet undecided whether it's best to use `bot.reply()` or `bot.notice()` for this (or even `bot.msg()` via PM to the caller), but I'll think about it between now and when I open the PR, and it can always be changed before merging.
| [
{
"content": "# coding=utf-8\n\"\"\"\nannounce.py - Send a message to all channels\nCopyright © 2013, Elad Alfassa, <elad@fedoraproject.org>\nLicensed under the Eiffel Forum License 2.\n\n\"\"\"\nfrom __future__ import unicode_literals, absolute_import, print_function, division\n\nfrom sopel.module import comma... | [
{
"content": "# coding=utf-8\n\"\"\"\nannounce.py - Send a message to all channels\nCopyright © 2013, Elad Alfassa, <elad@fedoraproject.org>\nLicensed under the Eiffel Forum License 2.\n\n\"\"\"\nfrom __future__ import unicode_literals, absolute_import, print_function, division\n\nfrom sopel.module import comma... | diff --git a/sopel/modules/announce.py b/sopel/modules/announce.py
index 2b3df4bc60..0f3f217ace 100644
--- a/sopel/modules/announce.py
+++ b/sopel/modules/announce.py
@@ -21,3 +21,4 @@ def announce(bot, trigger):
return
for channel in bot.channels:
bot.msg(channel, '[ANNOUNCEMENT] %s' % trigger.group(2))
+ bot.reply('Announce complete.')
|
sanic-org__sanic-1343 | Pin versions for LTS release
I think that versions of (some) should be allowed to float but when we are ready for an LTS release, the versions should be pinned at that time.
@r0fls @ahopkins @seemethere @ashleysommer @yunstanford @ahopkins
| [
{
"content": "\"\"\"\nSanic\n\"\"\"\nimport codecs\nimport os\nimport re\nfrom distutils.errors import DistutilsPlatformError\nfrom distutils.util import strtobool\n\nfrom setuptools import setup\n\n\ndef open_local(paths, mode='r', encoding='utf8'):\n path = os.path.join(\n os.path.abspath(os.path.di... | [
{
"content": "\"\"\"\nSanic\n\"\"\"\nimport codecs\nimport os\nimport re\nfrom distutils.errors import DistutilsPlatformError\nfrom distutils.util import strtobool\n\nfrom setuptools import setup\n\n\ndef open_local(paths, mode='r', encoding='utf8'):\n path = os.path.join(\n os.path.abspath(os.path.di... | diff --git a/environment.yml b/environment.yml
index 9f416c0e33..e13c76fe9a 100644
--- a/environment.yml
+++ b/environment.yml
@@ -12,7 +12,7 @@ dependencies:
- zlib=1.2.8=0
- pip:
- uvloop>=0.5.3
- - httptools>=0.0.9
+ - httptools>=0.0.10
- ujson>=1.35
- aiofiles>=0.3.0
- websockets>=3.2
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 004f6f9ec8..12b29a2b87 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -3,7 +3,7 @@ aiohttp>=2.3.0,<=3.2.1
chardet<=2.3.0
beautifulsoup4
coverage
-httptools
+httptools>=0.0.10
flake8
pytest==3.3.2
tox
diff --git a/requirements.txt b/requirements.txt
index e320e78181..74d9bf8353 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,5 @@
aiofiles
-httptools
+httptools>=0.0.10
ujson; sys_platform != "win32" and implementation_name == "cpython"
uvloop; sys_platform != "win32" and implementation_name == "cpython"
websockets>=5.0,<6.0
diff --git a/setup.py b/setup.py
index 34703ab4d3..2ce1510f16 100644
--- a/setup.py
+++ b/setup.py
@@ -56,7 +56,7 @@ def open_local(paths, mode='r', encoding='utf8'):
uvloop = 'uvloop>=0.5.3' + env_dependency
requirements = [
- 'httptools>=0.0.9',
+ 'httptools>=0.0.10',
uvloop,
ujson,
'aiofiles>=0.3.0',
|
AnalogJ__lexicon-479 | GoDaddy provider should recognize domaincontrol.com as its nameserver
For the auto provider, it should recognize that domains managed by GoDaddy often have nameservers under the *.domaincontrol.com namespace. You can verify this is GoDaddy via whois; and I also tested this by adding 'domaincontrol.com' to the recognized nameservers with the following patch.
```
--- providers/godaddy.py.orig 2020-01-09 08:58:26.160360574 +0000
+++ providers/godaddy.py 2020-01-10 19:27:29.292030195 +0000
@@ -14,5 +14,5 @@
LOGGER = logging.getLogger(__name__)
-NAMESERVER_DOMAINS = ['godaddy.com']
+NAMESERVER_DOMAINS = ['godaddy.com','domaincontrol.com']
```
And the current whois excerpt:
```
$ whois domaincontrol.com
Domain Name: DOMAINCONTROL.COM
...
Updated Date: 2018-08-07T19:25:37Z
...
Registrant Organization: Go Daddy Operating Company, LLC
Registrant State/Province: Arizona
Registrant Country: US
```
| [
{
"content": "\"\"\"Module provider for Godaddy\"\"\"\nfrom __future__ import absolute_import\nimport hashlib\nimport json\nimport logging\n\nimport requests\nfrom requests.adapters import HTTPAdapter\nfrom urllib3.util.retry import Retry\n\nfrom lexicon.providers.base import Provider as BaseProvider\n\n\nLOGGE... | [
{
"content": "\"\"\"Module provider for Godaddy\"\"\"\nfrom __future__ import absolute_import\nimport hashlib\nimport json\nimport logging\n\nimport requests\nfrom requests.adapters import HTTPAdapter\nfrom urllib3.util.retry import Retry\n\nfrom lexicon.providers.base import Provider as BaseProvider\n\n\nLOGGE... | diff --git a/lexicon/providers/godaddy.py b/lexicon/providers/godaddy.py
index cb1703f68..26c3da0f3 100644
--- a/lexicon/providers/godaddy.py
+++ b/lexicon/providers/godaddy.py
@@ -13,7 +13,7 @@
LOGGER = logging.getLogger(__name__)
-NAMESERVER_DOMAINS = ['godaddy.com']
+NAMESERVER_DOMAINS = ['godaddy.com', 'domaincontrol.com']
def provider_parser(subparser):
|
zigpy__zha-device-handlers-4 | Missing import for types breaking LocalDataCluster
| [
{
"content": "import importlib\nimport pkgutil\nfrom zigpy.quirks import CustomCluster\nfrom zigpy.util import ListenableMixin\n\nUNKNOWN = 'Unknown'\n\n\nclass Bus(ListenableMixin):\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self._listeners = {}\n\n\nclass L... | [
{
"content": "import importlib\nimport pkgutil\nfrom zigpy.quirks import CustomCluster\nfrom zigpy.util import ListenableMixin\nimport zigpy.types as types\n\nUNKNOWN = 'Unknown'\n\n\nclass Bus(ListenableMixin):\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self... | diff --git a/zhaquirks/__init__.py b/zhaquirks/__init__.py
index 571c1f5605..36bf731e71 100644
--- a/zhaquirks/__init__.py
+++ b/zhaquirks/__init__.py
@@ -2,6 +2,7 @@
import pkgutil
from zigpy.quirks import CustomCluster
from zigpy.util import ListenableMixin
+import zigpy.types as types
UNKNOWN = 'Unknown'
|
scikit-image__scikit-image-2821 | Performance regression in morphology.watershed from 0.12.3 to 0.13.0
## Description
Great performance degradation in skimage.morphology.watershed for ver. 0.13.0 in comparison with ver. 0.12.3.
3D images with size about (500, 500, 500) were testes.
## Way to reproduce
```
import numpy as np
from scipy import ndimage
from skimage import feature
from skimage import morphology
import time
dimensions = 500
z, y, x = np.indices((dimensions, dimensions + 5, dimensions + 10))
radius = 100
xc, yc, zc = dimensions // 3 - 6, dimensions // 3, dimensions // 3 + 6
test_image = np.int32((x - xc) ** 2 + (y - yc) ** 2 + (z - zc) ** 2 <= radius ** 2)
test_image[zc - 5, yc - 5, xc - 5] = 0
test_image[zc - 5, yc + 5, xc - 5] = 0
test_image[zc - 5, yc + 4, xc - 5] = 0
xc *= 2
yc *= 2
zc *= 2
test_image[zc:zc + 100, yc:yc + 100, xc:xc + 100] = 2
test_image[zc + 10 + 3, yc + 10 + 3, xc + 10 - 3] = 0
test_image[zc + 10 - 3, yc + 10 - 3, xc + 10 + 3] = 0
test_image[zc + 10 + 3, yc + 10 - 3, xc + 10 + 3] = 0
test_image[zc-5, yc-5, xc-5] = 3
dist = ndimage.distance_transform_edt(test_image)
local_maxi = feature.peak_local_max(dist, min_distance=2, indices=False)
labels, num_obj = ndimage.label(local_maxi)
start_t = time.clock()
labels = morphology.watershed(-dist, labels, connectivity=ndimage.generate_binary_structure(3, 3), mask=test_image)
print(time.clock() - start_t)
```
Processing time for ver. 0.12.3 is about 16 s on my computer.
Processing time for ver. 0.13.0 is about 90 s.
Slowing down about 4 times!!!
Python version 3.5 for Windows
scikit-image versions 0.12.3 and 0.13.0
| [
{
"content": "\"\"\"watershed.py - watershed algorithm\n\nThis module implements a watershed algorithm that apportions pixels into\nmarked basins. The algorithm uses a priority queue to hold the pixels\nwith the metric for the priority queue being pixel value, then the time\nof entry into the queue - this settl... | [
{
"content": "\"\"\"watershed.py - watershed algorithm\n\nThis module implements a watershed algorithm that apportions pixels into\nmarked basins. The algorithm uses a priority queue to hold the pixels\nwith the metric for the priority queue being pixel value, then the time\nof entry into the queue - this settl... | diff --git a/skimage/morphology/_watershed.pyx b/skimage/morphology/_watershed.pyx
index 5d31fe76560..f7f9e1f1f5f 100644
--- a/skimage/morphology/_watershed.pyx
+++ b/skimage/morphology/_watershed.pyx
@@ -23,6 +23,7 @@ ctypedef cnp.int8_t DTYPE_BOOL_t
include "heap_watershed.pxi"
+@cython.wraparound(False)
@cython.boundscheck(False)
@cython.cdivision(True)
@cython.overflowcheck(False)
@@ -40,7 +41,42 @@ cdef inline double _euclid_dist(cnp.int32_t pt0, cnp.int32_t pt1,
return sqrt(result)
+@cython.wraparound(False)
@cython.boundscheck(False)
+@cython.cdivision(True)
+@cython.unraisable_tracebacks(False)
+cdef inline DTYPE_BOOL_t _diff_neighbors(DTYPE_INT32_t[::1] output,
+ DTYPE_INT32_t[::1] structure,
+ DTYPE_BOOL_t[::1] mask,
+ Py_ssize_t index):
+ """
+ Return ``True`` and set ``mask[index]`` to ``False`` if the neighbors of
+ ``index`` (as given by the offsets in ``structure``) have more than one
+ distinct nonzero label.
+ """
+ cdef:
+ Py_ssize_t i, neighbor_index
+ DTYPE_INT32_t neighbor_label0, neighbor_label1
+ Py_ssize_t nneighbors = structure.shape[0]
+
+ if not mask[index]:
+ return True
+
+ neighbor_label0, neighbor_label1 = 0, 0
+ for i in range(nneighbors):
+ neighbor_index = structure[i] + index
+ if mask[neighbor_index]: # neighbor not a watershed line
+ if not neighbor_label0:
+ neighbor_label0 = output[neighbor_index]
+ else:
+ neighbor_label1 = output[neighbor_index]
+ if neighbor_label1 and neighbor_label1 != neighbor_label0:
+ mask[index] = False
+ return True
+ return False
+
+@cython.boundscheck(False)
+@cython.wraparound(False)
def watershed_raveled(cnp.float64_t[::1] image,
DTYPE_INT32_t[::1] marker_locations,
DTYPE_INT32_t[::1] structure,
@@ -89,7 +125,7 @@ def watershed_raveled(cnp.float64_t[::1] image,
cdef Py_ssize_t i = 0
cdef Py_ssize_t age = 1
cdef Py_ssize_t index = 0
- cdef DTYPE_INT32_t wsl_label = -1
+ cdef DTYPE_BOOL_t compact = (compactness > 0)
cdef Heap *hp = <Heap *> heap_from_numpy2()
@@ -100,52 +136,58 @@ def watershed_raveled(cnp.float64_t[::1] image,
elem.index = index
elem.source = index
heappush(hp, &elem)
- if wsl and wsl_label >= output[index]:
- wsl_label = output[index] - 1
while hp.items > 0:
heappop(hp, &elem)
- # this can happen if the same pixel entered the queue
- # several times before being processed.
- if wsl and output[elem.index] == wsl_label:
- # wsl labels are not propagated.
- continue
-
- if output[elem.index] and elem.index != elem.source:
- # non-marker, already visited from another neighbor
- continue
+ if compact or wsl:
+ # in the compact case, we need to label pixels as they come off
+ # the heap, because the same pixel can be pushed twice, *and* the
+ # later push can have lower cost because of the compactness.
+ #
+ # In the case of preserving watershed lines, a similar argument
+ # applies: we can only observe that all neighbors have been labeled
+ # as the pixel comes off the heap. Trying to do so at push time
+ # is a bug.
+ if output[elem.index] and elem.index != elem.source:
+ # non-marker, already visited from another neighbor
+ continue
+ if wsl:
+ # if the current element has different-labeled neighbors and we
+ # want to preserve watershed lines, we mask it and move on
+ if _diff_neighbors(output, structure, mask, elem.index):
+ continue
+ output[elem.index] = output[elem.source]
- output[elem.index] = output[elem.source]
for i in range(nneighbors):
# get the flattened address of the neighbor
- index = structure[i] + elem.index
+ neighbor_index = structure[i] + elem.index
- if not mask[index]:
+ if not mask[neighbor_index]:
+ # this branch includes basin boundaries, aka watershed lines
# neighbor is not in mask
continue
- if wsl and output[index] == wsl_label:
- continue
-
- if output[index]:
- # neighbor has a label (but not wsl_label):
- # the neighbor is not added to the queue.
- if wsl:
- # if the label of the neighbor is different
- # from the label of the pixel taken from the queue,
- # the latter takes the WSL label.
- if output[index] != output[elem.index]:
- output[elem.index] = wsl_label
+ if output[neighbor_index]:
+ # pre-labeled neighbor is not added to the queue.
continue
age += 1
- new_elem.value = image[index]
- if compactness > 0:
+ new_elem.value = image[neighbor_index]
+ if compact:
new_elem.value += (compactness *
- _euclid_dist(index, elem.source, strides))
+ _euclid_dist(neighbor_index, elem.source,
+ strides))
+ elif not wsl:
+ # in the simplest watershed case (no compactness and no
+ # watershed lines), we can label a pixel at the time that
+ # we push it onto the heap, because it can't be reached with
+ # lower cost later.
+ # This results in a very significant performance gain, see:
+ # https://github.com/scikit-image/scikit-image/issues/2636
+ output[neighbor_index] = output[elem.index]
new_elem.age = age
- new_elem.index = index
+ new_elem.index = neighbor_index
new_elem.source = elem.source
heappush(hp, &new_elem)
diff --git a/skimage/morphology/watershed.py b/skimage/morphology/watershed.py
index d98c245e5a8..ff30c2c8314 100644
--- a/skimage/morphology/watershed.py
+++ b/skimage/morphology/watershed.py
@@ -261,8 +261,4 @@ def watershed(image, markers, connectivity=1, offset=None, mask=None,
output = crop(output, pad_width, copy=True)
- if watershed_line:
- min_val = output.min()
- output[output == min_val] = 0
-
return output
|
spacetelescope__jwql-550 | Cron jobs for monitors currently failing
Traceback (most recent call last):
File "/home/jwqladm/repositories/jwql/jwql/instrument_monitors/common_monitors/bias_monitor.py", line 58, in <module>
from jwql.instrument_monitors.common_monitors.dark_monitor import mast_query_darks
File "/home/jwqladm/repositories/jwql/jwql/instrument_monitors/common_monitors/dark_monitor.py", line 77, in <module>
from jwql.jwql_monitors import monitor_mast
File "/home/jwqladm/repositories/jwql/jwql/jwql_monitors/monitor_mast.py", line 25, in <module>
from bokeh.embed import components
File "/grp/jwst/ins/jwql/envs/miniconda3/envs/jwql-3.6/lib/python3.6/site-packages/bokeh/__init__.py", line 81, in <module>
from .util import logconfig
File "/grp/jwst/ins/jwql/envs/miniconda3/envs/jwql-3.6/lib/python3.6/site-packages/bokeh/util/logconfig.py", line 87, in <module>
level = settings.py_log_level()
File "/grp/jwst/ins/jwql/envs/miniconda3/envs/jwql-3.6/lib/python3.6/site-packages/bokeh/settings.py", line 310, in __call__
return self._convert(os.environ[self._env_var])
File "/grp/jwst/ins/jwql/envs/miniconda3/envs/jwql-3.6/lib/python3.6/site-packages/bokeh/settings.py", line 236, in convert_logging
raise ValueError("Cannot convert {} to log level, valid values are: {}".format(value, ", ".join(_log_levels)))
ValueError: Cannot convert WARN to log level, valid values are: CRITICAL, ERROR, WARNING, INFO, DEBUG, TRACE, NONE
| [
{
"content": "import numpy as np\nfrom setuptools import setup\nfrom setuptools import find_packages\n\nVERSION = '0.22.0'\n\nAUTHORS = 'Matthew Bourque, Misty Cracraft, Joe Filippazzo, Bryan Hilbert, '\nAUTHORS += 'Graham Kanarek, Catherine Martlin, Johannes Sahlmann, Ben Sunnquist'\n\nDESCRIPTION = 'The James... | [
{
"content": "import numpy as np\nfrom setuptools import setup\nfrom setuptools import find_packages\n\nVERSION = '0.22.0'\n\nAUTHORS = 'Matthew Bourque, Misty Cracraft, Joe Filippazzo, Bryan Hilbert, '\nAUTHORS += 'Graham Kanarek, Catherine Martlin, Johannes Sahlmann, Ben Sunnquist'\n\nDESCRIPTION = 'The James... | diff --git a/environment_python_3_6.yml b/environment_python_3_6.yml
index d00a53d5f..2d7ca225a 100644
--- a/environment_python_3_6.yml
+++ b/environment_python_3_6.yml
@@ -3,7 +3,7 @@ channels:
- http://ssb.stsci.edu/astroconda
dependencies:
- astroquery=0.3.10
-- bokeh=1.4.0
+- bokeh>=1.0,<1.4
- django=2.2.5
- flake8=3.7.8
- inflection=0.3.1
diff --git a/requirements.txt b/requirements.txt
index 691234c14..305c99ffb 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,7 +2,7 @@ asdf==2.4.2
astropy==4.0
astroquery==0.4
authlib==0.13
-bokeh==1.4.0
+bokeh>=1.0,<1.4
codecov==2.0.15
django==3.0.3
flake8==3.7.9
diff --git a/setup.py b/setup.py
index de0d0f3ee..b61b1caea 100644
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,7 @@
'astropy>=3.2.1',
'astroquery>=0.3.9',
'authlib',
- 'bokeh>=1.0',
+ 'bokeh>=1.0,<1.4',
'codecov',
'django>=2.0',
'flake8',
|
blaze__blaze-873 | Pandas Boolean Arithmetic Display
Boolean arithmetic with a `Data(pandas.DataFrame)` seems to mess up its interactive display, but the actual expression is fully funcitonal.
``` Python
>>> import blaze as bz
>>> import pandas as pd
>>> data = bz.Data(pd.read_csv("iris.csv"))
>>> data
SepalLength SepalWidth PetalLength PetalWidth Name
0 5.1 3.5 1.4 0.2 Iris-setosa
1 4.9 3.0 1.4 0.2 Iris-setosa
2 4.7 3.2 1.3 0.2 Iris-setosa
3 4.6 3.1 1.5 0.2 Iris-setosa
4 5.0 3.6 1.4 0.2 Iris-setosa
5 5.4 3.9 1.7 0.4 Iris-setosa
6 4.6 3.4 1.4 0.3 Iris-setosa
7 5.0 3.4 1.5 0.2 Iris-setosa
8 4.4 2.9 1.4 0.2 Iris-setosa
9 4.9 3.1 1.5 0.1 Iris-setosa
...
>>> (data.SepalLength > 5.0) & (data.SepalLength < 5.5)
Empty DataFrame
Columns: [None]
Index: []
>>> bz.compute( (data.SepalLength > 5.0) & (data.SepalLength < 5.5) )
0 True
1 False
2 False
3 False
4 False
...
145 False
146 False
147 False
148 False
149 False
Name: SepalLength, Length: 150, dtype: bool
>>> data[(data.SepalLength > 5.0) & (data.SepalLength < 5.5)]
SepalLength SepalWidth PetalLength PetalWidth Name
0 5.1 3.5 1.4 0.2 Iris-setosa
5 5.4 3.9 1.7 0.4 Iris-setosa
10 5.4 3.7 1.5 0.2 Iris-setosa
16 5.4 3.9 1.3 0.4 Iris-setosa
17 5.1 3.5 1.4 0.3 Iris-setosa
19 5.1 3.8 1.5 0.3 Iris-setosa
20 5.4 3.4 1.7 0.2 Iris-setosa
21 5.1 3.7 1.5 0.4 Iris-setosa
23 5.1 3.3 1.7 0.5 Iris-setosa
27 5.2 3.5 1.5 0.2 Iris-setosa
...
```
| [
{
"content": "from __future__ import absolute_import, division, print_function\n\nimport operator\nfrom toolz import first\nimport numpy as np\nfrom datashape import dshape, var, DataShape\nfrom dateutil.parser import parse as dt_parse\nfrom datashape.predicates import isscalar\nfrom datashape import coretypes ... | [
{
"content": "from __future__ import absolute_import, division, print_function\n\nimport operator\nfrom toolz import first\nimport numpy as np\nfrom datashape import dshape, var, DataShape\nfrom dateutil.parser import parse as dt_parse\nfrom datashape.predicates import isscalar\nfrom datashape import coretypes ... | diff --git a/blaze/expr/arithmetic.py b/blaze/expr/arithmetic.py
index d829b2e64..40a40dd0f 100644
--- a/blaze/expr/arithmetic.py
+++ b/blaze/expr/arithmetic.py
@@ -46,6 +46,8 @@ def _name(self):
return l
if r and not l:
return r
+ if l == r:
+ return l
@property
def _inputs(self):
diff --git a/blaze/expr/tests/test_arithmetic.py b/blaze/expr/tests/test_arithmetic.py
index cf8eb0ed9..50a076605 100644
--- a/blaze/expr/tests/test_arithmetic.py
+++ b/blaze/expr/tests/test_arithmetic.py
@@ -32,6 +32,8 @@ def test_names():
assert Add(y, x)._name != x._name
assert Add(y, x)._name != y._name
+ assert Add(x, x)._name == x._name
+
def test_inputs():
assert (x + y)._inputs == (x, y)
assert (x + 1)._inputs == (x,)
|
OctoPrint__OctoPrint-407 | Support circular beds in g-code visualiser
With delta printers we have center of bed at center of coordinate system. So now in G-code visualizer i get my objects in corner of bed. Pronterface has offset or center setting for this case, and repetier host has just checkbox "origin in center of bed" or similar.
Also would be nice to have round grid, like that in pronterface.
| [
{
"content": "# coding=utf-8\n__author__ = \"Gina Häußge <osd@foosel.net>\"\n__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'\n\nimport sys\nimport os\nimport yaml\nimport logging\nimport re\nimport uuid\n\nAPPNAME=\"OctoPrint\"\n\ninstance = None\n\ndef settings(init=Fals... | [
{
"content": "# coding=utf-8\n__author__ = \"Gina Häußge <osd@foosel.net>\"\n__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'\n\nimport sys\nimport os\nimport yaml\nimport logging\nimport re\nimport uuid\n\nAPPNAME=\"OctoPrint\"\n\ninstance = None\n\ndef settings(init=Fals... | diff --git a/src/octoprint/settings.py b/src/octoprint/settings.py
index 3c796e678b..8c2684bf0f 100644
--- a/src/octoprint/settings.py
+++ b/src/octoprint/settings.py
@@ -97,7 +97,7 @@ def settings(init=False, configfile=None, basedir=None):
{"x": 0.0, "y": 0.0}
],
"bedDimensions": {
- "x": 200.0, "y": 200.0
+ "x": 200.0, "y": 200.0, "r": 100
}
},
"appearance": {
diff --git a/src/octoprint/static/gcodeviewer/js/renderer.js b/src/octoprint/static/gcodeviewer/js/renderer.js
index ac5a53d2ce..7fe9c032bc 100644
--- a/src/octoprint/static/gcodeviewer/js/renderer.js
+++ b/src/octoprint/static/gcodeviewer/js/renderer.js
@@ -186,37 +186,68 @@ GCODE.renderer = (function(){
ctx.translate(offsetBedX, offsetBedY);
- ctx.beginPath();
- var width = renderOptions["bed"]["x"] * zoomFactor;
- var height = renderOptions["bed"]["y"] * zoomFactor;
- var origin = {
- x: 0,
- y: -1 * renderOptions["bed"]["y"] * zoomFactor
- };
- ctx.strokeStyle = renderOptions["colorGrid"];
- ctx.fillStyle = "#ffffff";
- ctx.lineWidth = 2;
- ctx.rect(origin.x, origin.y, width, height);
- ctx.fill();
- ctx.stroke();
-
var i;
- ctx.strokeStyle = renderOptions["colorGrid"];
- ctx.lineWidth = 1;
- ctx.beginPath();
- for (i = 0; i <= renderOptions["bed"]["x"]; i += gridStep) {
- ctx.moveTo(i * zoomFactor, 0);
- ctx.lineTo(i * zoomFactor, -1 * renderOptions["bed"]["y"] * zoomFactor);
- }
- ctx.stroke();
+ if(renderOptions["bed"]["circular"]) {
+ ctx.strokeStyle = renderOptions["colorGrid"];
+ ctx.fillStyle = "#ffffff";
+ ctx.lineWidth = 2;
+
+ ctx.beginPath();
+ ctx.arc(0, 0, renderOptions["bed"]["r"] * zoomFactor, 0, Math.PI * 2, true);
+ ctx.fill();
+ ctx.stroke();
+
+ ctx.strokeStyle = renderOptions["colorGrid"];
+ ctx.lineWidth = 1;
+
+ ctx.beginPath();
+ for (i = -renderOptions["bed"]["r"]; i <= renderOptions["bed"]["r"]; i += gridStep) {
+ var x = i;
+ var y = Math.sqrt(Math.pow(renderOptions["bed"]["r"], 2) - Math.pow(x, 2));
+
+ ctx.moveTo(x * zoomFactor, y * zoomFactor);
+ ctx.lineTo(x * zoomFactor, -1 * y * zoomFactor);
+
+ ctx.moveTo(y * zoomFactor, x * zoomFactor);
+ ctx.lineTo(-1 * y * zoomFactor, x * zoomFactor);
+ }
+ ctx.stroke();
+ } else {
+ var width = renderOptions["bed"]["x"] * zoomFactor;
+ var height = renderOptions["bed"]["y"] * zoomFactor;
+ var origin = {
+ x: 0,
+ y: -1 * renderOptions["bed"]["y"] * zoomFactor
+ };
- ctx.beginPath();
- for (i = 0; i <= renderOptions["bed"]["y"]; i += gridStep) {
- ctx.moveTo(0, -1 * i * zoomFactor);
- ctx.lineTo(renderOptions["bed"]["x"] * zoomFactor, -1 * i * zoomFactor);
+ ctx.beginPath();
+ ctx.strokeStyle = renderOptions["colorGrid"];
+ ctx.fillStyle = "#ffffff";
+ ctx.lineWidth = 2;
+
+ ctx.rect(origin.x, origin.y, width, height);
+
+ ctx.fill();
+ ctx.stroke();
+
+ ctx.strokeStyle = renderOptions["colorGrid"];
+ ctx.lineWidth = 1;
+
+ ctx.beginPath();
+ for (i = 0; i <= renderOptions["bed"]["x"]; i += gridStep) {
+ ctx.moveTo(i * zoomFactor, 0);
+ ctx.lineTo(i * zoomFactor, -1 * renderOptions["bed"]["y"] * zoomFactor);
+ }
+ ctx.stroke();
+
+ ctx.beginPath();
+ for (i = 0; i <= renderOptions["bed"]["y"]; i += gridStep) {
+ ctx.moveTo(0, -1 * i * zoomFactor);
+ ctx.lineTo(renderOptions["bed"]["x"] * zoomFactor, -1 * i * zoomFactor);
+ }
+ ctx.stroke();
}
- ctx.stroke();
ctx.translate(-offsetBedX, -offsetBedY);
};
@@ -358,6 +389,12 @@ GCODE.renderer = (function(){
offsetModelY = -1 * (renderOptions["bed"]["y"] / 2 - (mdlInfo.min.y + mdlInfo.modelSize.y / 2)) * zoomFactor;
offsetBedX = -1 * (renderOptions["bed"]["x"] / 2 - (mdlInfo.min.x + mdlInfo.modelSize.x / 2)) * zoomFactor;
offsetBedY = (renderOptions["bed"]["y"] / 2 - (mdlInfo.min.y + mdlInfo.modelSize.y / 2)) * zoomFactor;
+ } else if (renderOptions["bed"]["circular"]) {
+ var canvasCenter = ctx.transformedPoint(canvas.width / 2, canvas.height / 2);
+ offsetModelX = canvasCenter.x;
+ offsetModelY = canvasCenter.y;
+ offsetBedX = 0;
+ offsetBedY = 0;
} else {
offsetModelX = 0;
offsetModelY = 0;
@@ -397,8 +434,13 @@ GCODE.renderer = (function(){
init: function(){
startCanvas();
initialized = true;
- zoomFactor = Math.min((canvas.width - 10) / renderOptions["bed"]["x"], (canvas.height - 10) / renderOptions["bed"]["y"]);
- ctx.translate((canvas.width - renderOptions["bed"]["x"] * zoomFactor) / 2, renderOptions["bed"]["y"] * zoomFactor + (canvas.height - renderOptions["bed"]["y"] * zoomFactor) / 2);
+ var bedWidth = renderOptions["bed"]["x"];
+ var bedHeight = renderOptions["bed"]["y"];;
+ if(renderOptions["bed"]["circular"]) {
+ bedWidth = bedHeight = renderOptions["bed"]["r"] *2;
+ }
+ zoomFactor = Math.min((canvas.width - 10) / bedWidth, (canvas.height - 10) / bedHeight);
+ ctx.translate((canvas.width - bedWidth * zoomFactor) / 2, bedHeight * zoomFactor + (canvas.height - bedHeight * zoomFactor) / 2);
offsetModelX = 0;
offsetModelY = 0;
diff --git a/src/octoprint/static/js/app/viewmodels/settings.js b/src/octoprint/static/js/app/viewmodels/settings.js
index eef8d58473..8037974302 100644
--- a/src/octoprint/static/js/app/viewmodels/settings.js
+++ b/src/octoprint/static/js/app/viewmodels/settings.js
@@ -68,16 +68,22 @@ function SettingsViewModel(loginStateViewModel, usersViewModel) {
self.printer_bedDimensionX = ko.observable(undefined);
self.printer_bedDimensionY = ko.observable(undefined);
+ self.printer_bedDimensionR = ko.observable(undefined);
+ self.printer_bedCircular = ko.observable(undefined);
self.printer_bedDimensions = ko.computed({
read: function () {
return {
x: parseFloat(self.printer_bedDimensionX()),
- y: parseFloat(self.printer_bedDimensionY())
+ y: parseFloat(self.printer_bedDimensionY()),
+ r: parseFloat(self.printer_bedDimensionR()),
+ circular: self.printer_bedCircular()
};
},
write: function(value) {
self.printer_bedDimensionX(value.x);
self.printer_bedDimensionY(value.y);
+ self.printer_bedDimensionR(value.r);
+ self.printer_bedCircular(value.circular);
},
owner: self
});
diff --git a/src/octoprint/templates/settings.jinja2 b/src/octoprint/templates/settings.jinja2
index 4c29e6b179..1a3d8ace8b 100644
--- a/src/octoprint/templates/settings.jinja2
+++ b/src/octoprint/templates/settings.jinja2
@@ -148,7 +148,7 @@
</div>
<div class="control-group">
<label class="control-label" for="settings-bedSize">Bed Size</label>
- <div class="controls form-inline">
+ <div class="controls form-inline" data-bind="ifnot: printer_bedCircular">
<label>X:</label>
<div class="input-append">
<input type="number" step="0.01" class="input-mini text-right" data-bind="value: printer_bedDimensionX" id="settings-bedX">
@@ -160,6 +160,18 @@
<span class="add-on">mm</span>
</div>
</div>
+ <div class="controls form-inline" data-bind="if: printer_bedCircular">
+ <label>Radius:</label>
+ <div class="input-append">
+ <input type="number" step="0.01" class="input-mini text-right" data-bind="value: printer_bedDimensionR" id="settings-bedR">
+ <span class="add-on">mm</span>
+ </div>
+ </div>
+ <div class="controls form-inline">
+ <label class="checkbox">
+ <input type="checkbox" data-bind="checked: printer_bedCircular" id="settings-bedCircular">Circular
+ </label>
+ </div>
</div>
</form>
</div>
|
NVIDIA__NVFlare-1314 | [BUG] Command "nvflare" not found
Unfortunately I can't run nvflare on Ubuntu because it tells me that the command (`nvflare simulator -h`) is not found. Even if I use the Docker solution. Inside the docker it also shows "command not found". Thus unable to run local experiments.
I tried the MWE from https://nvflare.readthedocs.io/en/2.2.1/getting_started.html
Used OS: Ubuntu 18.04.5 LTS
| [
{
"content": "# Copyright (c) 2021, NVIDIA CORPORATION.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless require... | [
{
"content": "# Copyright (c) 2021, NVIDIA CORPORATION.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless require... | diff --git a/docs/conf.py b/docs/conf.py
index 8c2b6cb40f..e3989174fd 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -76,6 +76,7 @@ def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
"sphinx.ext.autosectionlabel",
+ "sphinx_copybutton",
]
autoclass_content = "both"
diff --git a/docs/example_applications_algorithms.rst b/docs/example_applications_algorithms.rst
index 0fd523e450..633bdf01f8 100644
--- a/docs/example_applications_algorithms.rst
+++ b/docs/example_applications_algorithms.rst
@@ -41,15 +41,18 @@ The following quickstart guides walk you through some of these examples:
* :ref:`Hello TensorFlow <hello_tf2>` - Example image classifier using FedAvg and TensorFlow as the deep learning training frameworks
2. **FL algorithms**
+
* `Federated Learning with CIFAR-10 (GitHub) <https://github.com/NVIDIA/NVFlare/tree/main/examples/cifar10>`_ - Includes examples of using FedAvg, FedProx, FedOpt, SCAFFOLD, homomorphic encryption, and streaming of TensorBoard metrics to the server during training
* `Federated XGBoost (GitHub) <https://github.com/NVIDIA/NVFlare/tree/main/examples/xgboost>`_ - Includes examples of histogram-based and tree-based algorithms. Tree-based algorithms also includes bagging and cyclic approaches
3. **Medical Image Analysis**
+
* `Hello MONAI Bundle (GitHub) <https://github.com/NVIDIA/NVFlare/tree/main/examples/hello-monai-bundle>`_ - For an example of using NVIDIA FLARE to train a 3D medical image analysis model using federated averaging (FedAvg) and MONAI Bundle `MONAI <https://monai.io/>`_
* `Federated Learning with Differential Privacy for BraTS18 segmentation (GitHub) <https://github.com/NVIDIA/NVFlare/tree/main/examples/brats18>`_ - Illustrates the use of differential privacy for training brain tumor segmentation models using federated learning
* `Federated Learning for Prostate Segmentation from Multi-source Data (GitHub) <https://github.com/NVIDIA/NVFlare/tree/main/examples/prostate>`_ - Example of training a multi-institutional prostate segmentation model using `FedAvg <https://arxiv.org/abs/1602.05629>`_, `FedProx <https://arxiv.org/abs/1812.06127>`_, and `Ditto <https://arxiv.org/abs/2012.04221>`_
4. **Federated Statistics**
+
* :ref:`Federated Statistic Overview <federated_statistics>` - Discuss the overall federated statistics features
* `Federated Statistics for medical imaging (Github) <https://github.com/NVIDIA/NVFlare/tree/main/examples/federated_statistics/image_stats/README.md>`_ - Example of gathering local image histogram to compute the global dataset histograms.
* `Federated Statistics for tabular data with DataFrame (Github) <https://github.com/NVIDIA/NVFlare/tree/main/examples/federated_statistics/df_stats/README.md>`_ - Example of gathering local statistics summary from Pandas DataFrame to compute the global dataset statistics.
@@ -115,10 +118,12 @@ for an example with ditto in addition to FedProx, FedAvg, and centralized traini
Federated XGBoost
^^^^^^^^^^^^^^^^^
+
* `Federated XGBoost (GitHub) <https://github.com/NVIDIA/NVFlare/tree/main/examples/xgboost>`_ - Includes examples of histogram-based and tree-based algorithms. Tree-based algorithms also includes bagging and cyclic approaches
Federated Analytics
^^^^^^^^^^^^^^^^^^^
+
* `Federated Statistics for medical imaging (Github) <https://github.com/NVIDIA/NVFlare/tree/main/examples/federated_statistics/image_stats/README.md>`_ - Example of gathering local image histogram to compute the global dataset histograms.
* `Federated Statistics for tabular data with DataFrame (Github) <https://github.com/NVIDIA/NVFlare/tree/main/examples/federated_statistics/df_stats/README.md>`_ - Example of gathering local statistics summary from Pandas DataFrame to compute the global dataset statistics.
* `Federated Statistics with Monai Statistics integration for Spleen CT Image (Github) <https://github.com/NVIDIA/NVFlare/tree/main/integration/monai/examples/spleen_ct_segmentation/README.md>`_ - Example demonstrated Monai statistics integration and few other features in federated statistics
diff --git a/docs/examples/hello_cross_val.rst b/docs/examples/hello_cross_val.rst
index 0741177092..5085982d68 100644
--- a/docs/examples/hello_cross_val.rst
+++ b/docs/examples/hello_cross_val.rst
@@ -9,7 +9,7 @@ Before You Start
Before jumping into this guide, make sure you have an environment
with `NVIDIA FLARE <https://pypi.org/project/nvflare/>`_ installed.
-You can follow the :ref:`installation <installation>` guide on the general concept of setting up a
+You can follow :ref:`getting_started` on the general concept of setting up a
Python virtual environment (the recommended environment) and how to install NVIDIA FLARE.
Prerequisite
@@ -115,8 +115,8 @@ Application Configuration
Inside the config folder there are two files, ``config_fed_client.json`` and ``config_fed_server.json``.
-.. literalinclude:: ../../examples/hello-numpy-cross-val/app/config/config_fed_server.json
- :language: python
+.. literalinclude:: ../../examples/hello-world/hello-numpy-cross-val/app/config/config_fed_server.json
+ :language: json
:linenos:
:caption: config_fed_server.json
@@ -126,8 +126,8 @@ The components "model_locator" and "formatter" have been added to work with the
and the rest is the same as in :doc:`Hello Scatter and Gather <hello_scatter_and_gather>`.
-.. literalinclude:: ../../examples/hello-numpy-cross-val/app/config/config_fed_client.json
- :language: python
+.. literalinclude:: ../../examples/hello-world/hello-numpy-cross-val/app/config/config_fed_client.json
+ :language: json
:linenos:
:caption: config_fed_client.json
diff --git a/docs/examples/hello_pt.rst b/docs/examples/hello_pt.rst
index a7a21311c7..5a3a4de399 100644
--- a/docs/examples/hello_pt.rst
+++ b/docs/examples/hello_pt.rst
@@ -11,7 +11,7 @@ to learn more about the specifics of `NVIDIA FLARE <https://pypi.org/project/nvf
Make sure you have an environment with NVIDIA FLARE installed.
-You can follow the :ref:`installation <installation>` guide on the general concept of setting up a
+You can follow :ref:`getting_started` on the general concept of setting up a
Python virtual environment (the recommended environment) and how to install NVIDIA FLARE.
@@ -77,11 +77,8 @@ architecture are modified from
Let's see what an extremely simplified CIFAR10 training looks like:
-.. literalinclude:: ../../examples/hello-pt/app/custom/simple_network.py
+.. literalinclude:: ../../examples/hello-world/hello-pt/app/custom/simple_network.py
:language: python
- :lines: 15-
- :lineno-start: 15
- :linenos:
:caption: simple_network.py
This ``SimpleNetwork`` class is your convolutional neural network to train with the CIFAR10 dataset.
@@ -101,11 +98,8 @@ You can think of all of this code as part of your local training loop, as every
Since you will encapsulate every training-related step in the ``Cifar10Trainer`` class,
let's put this preparation stage into the ``__init__`` method:
-.. literalinclude:: ../../examples/hello-pt/app/custom/cifar10trainer.py
+.. literalinclude:: ../../examples/hello-world/hello-pt/app/custom/cifar10trainer.py
:language: python
- :lines: 37-82
- :lineno-start: 37
- :linenos:
Local Train
@@ -114,7 +108,7 @@ Local Train
Now that you have your network and dataset setup, in the ``Cifar10Trainer`` class.
Let's also implement a local training loop in a method called ``local_train``:
-.. literalinclude:: ../../examples/hello-pt/app/custom/cifar10trainer.py
+.. literalinclude:: ../../examples/hello-world/hello-pt/app/custom/cifar10trainer.py
:language: python
:pyobject: Cifar10Trainer.local_train
@@ -144,7 +138,7 @@ We can then call our local train inside the ``execute`` method.
Take a look at the following code:
-.. literalinclude:: ../../examples/hello-pt/app/custom/cifar10trainer.py
+.. literalinclude:: ../../examples/hello-world/hello-pt/app/custom/cifar10trainer.py
:language: python
:pyobject: Cifar10Trainer.execute
@@ -187,7 +181,7 @@ Application Configuration
Inside the config folder there are two files, ``config_fed_client.json`` and ``config_fed_server.json``.
-.. literalinclude:: ../../examples/hello-pt/app/config/config_fed_client.json
+.. literalinclude:: ../../examples/hello-world/hello-pt/app/config/config_fed_client.json
:language: json
:linenos:
:caption: config_fed_client.json
@@ -205,7 +199,7 @@ The "validate" task for ``Cifar10Validator`` and the "submit_model" task are use
so we will be addressing that in a later example.
-.. literalinclude:: ../../examples/hello-pt/app/config/config_fed_server.json
+.. literalinclude:: ../../examples/hello-world/hello-pt/app/config/config_fed_server.json
:language: json
:linenos:
:caption: config_fed_server.json
diff --git a/docs/examples/hello_pt_tb.rst b/docs/examples/hello_pt_tb.rst
index e9228b15bf..715e83729b 100644
--- a/docs/examples/hello_pt_tb.rst
+++ b/docs/examples/hello_pt_tb.rst
@@ -23,7 +23,7 @@ The setup of this exercise consists of one **server** and two **clients**.
Let's get started. Make sure you have an environment with NVIDIA FLARE installed as described in
-:doc:`quickstart <../quickstart>` guide. First clone the repo:
+:ref:`getting_started`. First clone the repo:
.. code-block:: shell
@@ -42,7 +42,7 @@ Adding TensorBoard Streaming to Configurations
Inside the config folder there are two files, ``config_fed_client.json`` and ``config_fed_server.json``.
-.. literalinclude:: ../../examples/hello-pt-tb/app/config/config_fed_client.json
+.. literalinclude:: ../../examples/hello-world/hello-pt-tb/app/config/config_fed_client.json
:language: json
:linenos:
:caption: config_fed_client.json
@@ -60,7 +60,7 @@ which converts local events to federated events.
This changes the event ``analytix_log_stats`` into a fed event ``fed.analytix_log_stats``,
which will then be streamed from the clients to the server.
-.. literalinclude:: ../../examples/hello-pt-tb/app/config/config_fed_server.json
+.. literalinclude:: ../../examples/hello-world/hello-pt-tb/app/config/config_fed_server.json
:language: json
:linenos:
:caption: config_fed_server.json
@@ -83,7 +83,7 @@ In this exercise, all of the TensorBoard code additions will be made in ``pt_lea
First we must initialize our TensorBoard writer to the ``AnalyticsSender`` we defined in the client config:
-.. literalinclude:: ../../examples/hello-pt-tb/app/custom/pt_learner.py
+.. literalinclude:: ../../examples/hello-world/hello-pt-tb/app/custom/pt_learner.py
:language: python
:lines: 61, 89-92
:lineno-start: 61
@@ -98,7 +98,7 @@ but we can also define it in the client config to be passed into the constructor
Now that our TensorBoard writer is set to ``AnalyticsSender``,
we can write and stream training metrics to the server in ``local_train()``:
-.. literalinclude:: ../../examples/hello-pt-tb/app/custom/pt_learner.py
+.. literalinclude:: ../../examples/hello-world/hello-pt-tb/app/custom/pt_learner.py
:language: python
:lines: 127-159
:lineno-start: 127
diff --git a/docs/examples/hello_scatter_and_gather.rst b/docs/examples/hello_scatter_and_gather.rst
index 3e0394a7b7..ef04dced79 100644
--- a/docs/examples/hello_scatter_and_gather.rst
+++ b/docs/examples/hello_scatter_and_gather.rst
@@ -9,7 +9,7 @@ Before You Start
Before jumping into this guide, make sure you have an environment with
`NVIDIA FLARE <https://pypi.org/project/nvflare/>`_ installed.
-You can follow the :ref:`installation <installation>` guide on the general concept of setting up a
+You can follow :ref:`getting_started` on the general concept of setting up a
Python virtual environment (the recommended environment) and how to install NVIDIA FLARE.
@@ -121,12 +121,12 @@ Inside the config folder there are two files, ``config_fed_client.json`` and ``c
For now, the default configurations are sufficient.
-.. literalinclude:: ../../examples/hello-numpy-sag/app/config/config_fed_server.json
+.. literalinclude:: ../../examples/hello-world/hello-numpy-sag/app/config/config_fed_server.json
:language: json
:linenos:
:caption: config_fed_server.json
-.. literalinclude:: ../../examples/hello-numpy-sag/app/config/config_fed_client.json
+.. literalinclude:: ../../examples/hello-world/hello-numpy-sag/app/config/config_fed_client.json
:language: json
:linenos:
:caption: config_fed_client.json
diff --git a/docs/examples/hello_tf2.rst b/docs/examples/hello_tf2.rst
index b877c8799b..3a1c8b118f 100644
--- a/docs/examples/hello_tf2.rst
+++ b/docs/examples/hello_tf2.rst
@@ -55,7 +55,7 @@ with two clients and one server.
Before you start, let's see what a simplified MNIST network looks like.
-.. literalinclude:: ../../examples/hello-tf2/app/custom/tf2_net.py
+.. literalinclude:: ../../examples/hello-world/hello-tf2/app/custom/tf2_net.py
:language: python
:lines: 15-
:lineno-start: 15
@@ -79,7 +79,7 @@ Additionally, you must setup the optimizer, loss function and transform to proce
Since every step will be encapsulated in the ``SimpleTrainer`` class,
let's put this preparation stage into one method ``setup``:
-.. literalinclude:: ../../examples/hello-tf2/app/custom/trainer.py
+.. literalinclude:: ../../examples/hello-world/hello-tf2/app/custom/trainer.py
:language: python
:lines: 41-71
:lineno-start: 41
@@ -94,7 +94,7 @@ NVIDIA FLARE enters or leaves a certain stage.
In this case, there is an ``Event`` called ``EventType.START_RUN`` which perfectly matches these requirements.
Because our trainer is a subclass of ``FLComponent``, you can implement the handler to handle the event and call the setup method:
-.. literalinclude:: ../../examples/hello-tf2/app/custom/trainer.py
+.. literalinclude:: ../../examples/hello-world/hello-tf2/app/custom/trainer.py
:language: python
:lines: 37-39
:lineno-start: 37
@@ -119,14 +119,14 @@ Link NVIDIA FLARE with Local Train
Take a look at the following code:
-.. literalinclude:: ../../examples/hello-tf2/app/custom/trainer.py
+.. literalinclude:: ../../examples/hello-world/hello-tf2/app/custom/trainer.py
:language: python
:pyobject: SimpleTrainer.execute
Every NVIDIA FLARE client receives the model weights from the server in the :ref:`shareable <shareable>`.
This application uses the ``exclude_var`` filter, so make sure to replace the missing layer with weights from the clients' previous training round:
-.. literalinclude:: ../../examples/hello-tf2/app/custom/trainer.py
+.. literalinclude:: ../../examples/hello-world/hello-tf2/app/custom/trainer.py
:language: python
:lines: 111-115
:lineno-start: 111
@@ -134,7 +134,7 @@ This application uses the ``exclude_var`` filter, so make sure to replace the mi
Now update the local model with those received weights:
-.. literalinclude:: ../../examples/hello-tf2/app/custom/trainer.py
+.. literalinclude:: ../../examples/hello-world/hello-tf2/app/custom/trainer.py
:language: python
:lines: 118
:lineno-start: 118
@@ -142,7 +142,7 @@ Now update the local model with those received weights:
Then perform a simple :code:`self.model.fit` so the client's model is trained with its own dataset:
-.. literalinclude:: ../../examples/hello-tf2/app/custom/trainer.py
+.. literalinclude:: ../../examples/hello-world/hello-tf2/app/custom/trainer.py
:language: python
:lines: 122-127
:lineno-start: 122
@@ -165,7 +165,7 @@ For this exercise, we use a basic ``exclude_var`` filter to exclude the variable
as it goes outbound from the client to the server. The excluded layer is replaced with all zeros of the same shape,
which reduces compression size and ensures that the clients' weights for this variable are not shared with the server.
-.. literalinclude:: ../../examples/hello-tf2/app/custom/filter.py
+.. literalinclude:: ../../examples/hello-world/hello-tf2/app/custom/filter.py
:language: python
:lines: 15-
:lineno-start: 15
@@ -192,7 +192,7 @@ Model Persistor
The model persistor is used to load and save models on the server.
-.. literalinclude:: ../../examples/hello-tf2/app/custom/tf2_model_persistor.py
+.. literalinclude:: ../../examples/hello-world/hello-tf2/app/custom/tf2_model_persistor.py
:language: python
:lines: 15-
:lineno-start: 15
@@ -212,7 +212,7 @@ Application Configuration
Finally, inside the config folder there are two files, ``config_fed_client.json`` and ``config_fed_server.json``.
-.. literalinclude:: ../../examples/hello-tf2/app/config/config_fed_server.json
+.. literalinclude:: ../../examples/hello-world/hello-tf2/app/config/config_fed_server.json
:language: json
:linenos:
:caption: config_fed_server.json
@@ -225,7 +225,7 @@ The ``persistor`` is configured to use ``TF2ModelPersistor`` in the custom direc
Python module paths.
-.. literalinclude:: ../../examples/hello-tf2/app/config/config_fed_client.json
+.. literalinclude:: ../../examples/hello-world/hello-tf2/app/config/config_fed_client.json
:language: json
:linenos:
:caption: config_fed_client.json
diff --git a/docs/getting_started.rst b/docs/getting_started.rst
index a2fc8695d1..4aeb780e45 100644
--- a/docs/getting_started.rst
+++ b/docs/getting_started.rst
@@ -1,4 +1,4 @@
-.. _quickstart:
+.. _getting_started:
###############
Getting Started
@@ -98,34 +98,34 @@ environment.
To get started with a containerized deployment, you will first need to install a supported
container runtime and the NVIDIA Container Toolkit to enable support for GPUs. System requirements
-and instructions for this can be found in the `NVIDIA Container Toolkit Install Guide <https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html>`.
+and instructions for this can be found in the `NVIDIA Container Toolkit Install Guide <https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html>`_.
A simple Dockerfile is used to capture the base requirements and dependencies. In
this case, we're building an environment that will support PyTorch-based workflows,
in particular the `Hello PyTorch with Tensorboard Streaming <https://github.com/NVIDIA/NVFlare/tree/main/examples/hello-pt-tb>`_
example. The base for this build is the NGC PyTorch container. On this base image,
we will install the necessary dependencies and clone the NVIDIA FLARE GitHub
-source code into the root workspace directory. To create a Dockerfile, create a file named ``Dockerfile``
-using any text editor and include the following:
+source code into the root workspace directory.
-.. code-block:: dockerfile
+Let's first create a folder called ``build`` and then create a file inside named ``Dockerfile``:
- ARG PYTORCH_IMAGE=nvcr.io/nvidia/pytorch:22.09-py3
- FROM ${PYTORCH_IMAGE}
+.. code-block:: shell
+
+ mkdir build
+ cd build
+ touch Dockerfile
- RUN python3 -m pip install -U pip
- RUN python3 -m pip install -U setuptools
- RUN python3 -m pip install torch torchvision tensorboard nvflare
+Using any text editor to edit the Dockerfile and paste the following:
- WORKDIR /workspace/
- RUN git clone https://github.com/NVIDIA/NVFlare.git
+.. literalinclude:: resources/Dockerfile.doc
+ :language: dockerfile
We can then build the new container by running docker build in the directory containing
this Dockerfile, for example tagging it nvflare-pt:
.. code-block:: shell
- docker build -t nvflare-pt .
+ docker build -t nvflare-pt . -f Dockerfile
This will result in a docker image, ``nvflare-pt:latest``. You can run this container with Docker,
in this example mounting a local ``my-workspace`` directory into the container for use as a persistent
diff --git a/docs/programming_guide/high_availability.rst b/docs/programming_guide/high_availability.rst
index e34c3504fe..2f14b5ccc3 100644
--- a/docs/programming_guide/high_availability.rst
+++ b/docs/programming_guide/high_availability.rst
@@ -187,8 +187,8 @@ occurs, the new SP will restore the FLContext, the Job workspace, and all the co
depending on when the state is persisted, there is potentially a portion of work that may still be lost when the state
is restored.
-FLCompoent
-==========
+FLComponent
+===========
Each FLComponent has its implementation to decide what kind of data it needs to persist and migrate, and then how
to restore from the persisted data.
diff --git a/docs/programming_guide/provisioning_system.rst b/docs/programming_guide/provisioning_system.rst
index 1928cfdbc0..803b49fdea 100644
--- a/docs/programming_guide/provisioning_system.rst
+++ b/docs/programming_guide/provisioning_system.rst
@@ -65,7 +65,9 @@ they can add API calls to Open Provision API to generate required outputs.
Provisioner
-----------
This is the container class that owns all instances of Project, Workspace, Provision Context, Builders and Participants,
-as shown in the above diagram. A typical usage of this class is like the following::
+as shown in the above diagram. A typical usage of this class is like the following:
+
+.. code-block:: python
provisioner = Provisioner(workspace_full_path, builders)
@@ -74,7 +76,9 @@ as shown in the above diagram. A typical usage of this class is like the follow
Project
-------
The Project class keeps information about participants. Therefore, information of any participant can be retrieved from
-the Project instance::
+the Project instance:
+
+.. code-block:: python
class Project(object):
def __init__(self, name: str, description: str, participants: List[Participant]):
@@ -102,7 +106,9 @@ Participant
-----------
Each participant is one entity that communicates with other participants inside the NVIDIA FLARE system during runtime.
Each participant has the following attributes: type, name, org and props. The attribute ``props`` is a dictionary and
-stores additional information::
+stores additional information:
+
+.. code-block:: python
class Participant(object):
def __init__(self, type: str, name: str, org: str, *args, **kwargs):
@@ -145,7 +151,9 @@ builders being called after it will not be able to access the wip folder.
.. note:: The collaboration among all builders is the responsibility of Open Provision API developers.
-Every builder has to subclass the Builder class and override one or more of these three methods::
+Every builder has to subclass the Builder class and override one or more of these three methods:
+
+.. code-block:: python
class Builder(ABC):
def initialize(self, ctx: dict):
@@ -187,14 +195,18 @@ is called before other builders' finalize methods and before other builders' bui
Case 1: generating additional files
-----------------------------------
The developers would like to add a configuration file about a database server to admin participants. The configuration
-is like this::
+is like this:
+
+.. code-block:: yaml
[database]
db_server = server name
db_port = port_number
user_name = admin's name
-As this requires adding one file to every admin participant, the developer can write a DBBuilder as follows::
+As this requires adding one file to every admin participant, the developer can write a DBBuilder as follows:
+
+.. code-block:: python
class DBConfigBuilder(Builder):
def __init__(self, db_server, db_port):
@@ -210,7 +222,9 @@ As this requires adding one file to every admin participant, the developer can w
f.write(f"db_port = {self.db_port}\n")
f.write(f"user_name = {admin.name}\n")
-And in project.yml, add an entry in the builders section::
+And in project.yml, add an entry in the builders section:
+
+.. code-block:: yaml
- path: byob.DBConfigBuilder
args:
@@ -221,7 +235,9 @@ Case 2: enhancing an existing builder
-------------------------------------
The developer would like to push zip files of each generated folder, to
a web server via a POST method. This can be done easily by implementing a new builder as
-follows (after pip install requests)::
+follows (after pip install requests):
+
+.. code-block:: python
class WebPostDistributionBuilder(Builder):
def __init__(self, url):
@@ -236,14 +252,18 @@ follows (after pip install requests)::
files = {"upload_file": open(dest_zip_file, "rb")}
r = requests.post(self.url, files=files)
-And just replace the existing one with the new builder under Builders in the project.yml::
+And just replace the existing one with the new builder under Builders in the project.yml:
+
+.. code-block:: yaml
- path: byob.WebPostDistributionBuilder
args:
url: https://example.com/nvflare/provision
For the above two cases, if developers opt to use Open Provision API directly instead of project.yml, they can do this
-(some code omitted for clarity)::
+(some code omitted for clarity):
+
+.. code-block:: python
from byob import WebPostDistributionBuilder
builders = list()
@@ -258,20 +278,26 @@ For the above two cases, if developers opt to use Open Provision API directly in
Case 3: adding both new builders and participants of new types
--------------------------------------------------------------
The developers would like to add participants of type = 'gateway.' In order to handle this type of participants, a new
-builder is needed to write gateway specific configuration. First, specify that in project.yml::
+builder is needed to write gateway specific configuration. First, specify that in project.yml:
+
+.. code-block:: yaml
- name: gateway1
type: gateway
org: nvidia
port: 8102
-or in API style::
+or in API style:
+
+.. code-block:: python
participants = list()
p = Participant(name="gateway1", type="gateway", org="nvidia", port=8102)
participants.append(p)
-A new builder to write 'gateway.conf' can be implemented as follows (for reference)::
+A new builder to write 'gateway.conf' can be implemented as follows (for reference):
+
+.. code-block:: python
class GWConfigBuilder(Builder):
def build(self, project, ctx):
@@ -288,7 +314,9 @@ A new builder to write 'gateway.conf' can be implemented as follows (for referen
Case 4: adding a builder for enabling the creation of zip archives for the startup kits
---------------------------------------------------------------------------------------
DistributionBuilder was included in NVIDIA FLARE before version 2.2.1 but has been removed from the
-default builders. You can make this builder available and add it as a builder in project.yml if you want to zip the startup kits::
+default builders. You can make this builder available and add it as a builder in project.yml if you want to zip the startup kits:
+
+.. code-block:: python
import os
import shutil
@@ -338,7 +366,9 @@ default builders. You can make this builder available and add it as a builder in
else:
shutil.make_archive(dest_zip_file, "zip", root_dir=os.path.join(wip_dir, dir), base_dir="startup")
-If the above code is made available at ``nvflare.lighter.impl.workspace.DistributionBuilder``, add the following to your project.yml at the bottom of the list of builders::
+If the above code is made available at ``nvflare.lighter.impl.workspace.DistributionBuilder``, add the following to your project.yml at the bottom of the list of builders:
+
+.. code-block:: yaml
path: nvflare.lighter.impl.workspace.DistributionBuilder
args:
@@ -418,7 +448,7 @@ This is the key file that describes the information which provisioning tool will
If there is no ``project.yml`` in your current working directory, simply run ``provision`` without any option. It
will ask you if you would like to have one sample copy of this file created.
-.. code-block:: shell
+.. code-block:: console
(nvflare-venv) ~/workspace$ provision
No project.yml found in current folder.
@@ -436,9 +466,11 @@ Edit the project.yml configuration file to meet your project requirements:
- "participants" describes the different parties in the FL system, distinguished by type. For all participants, "name"
should be unique, and "org" should be defined in AuthPolicyBuilder. The "name" of the Overseer and servers should
be in the format of fully qualified domain names. It is possible to use a unique hostname rather than FQDN, with
- the IP mapped to the hostname by having it added to ``/etc/hosts``.
+ the IP mapped to the hostname by having it added to ``/etc/hosts``:
+
- Type "overseer" describes the Overseer, with the "org", "name", "protocol", "api_root", and "port".
- - Type "server" describes the FL servers, with the "org", "name", "fed_learn_port", "admin_port", and "enable_byoc".
+ - Type "server" describes the FL servers, with the "org", "name", "fed_learn_port", "admin_port", and "enable_byoc":
+
- "fed_learn_port" is the port number for communication between the FL server and FL clients
- "admin_port" is the port number for communication between the FL server and FL administration client
- Type "client" describes the FL clients, with one "org" and "name" for each client as well as "enable_byoc" settings.
diff --git a/docs/quickstart.rst b/docs/quickstart.rst
index a9a7b819bb..d0c7e60a1c 100644
--- a/docs/quickstart.rst
+++ b/docs/quickstart.rst
@@ -2,4 +2,4 @@
Quickstart
##########
-See :ref:`quickstart`.
+See :ref:`getting_started`.
diff --git a/docs/real_world_fl/application.rst b/docs/real_world_fl/application.rst
index b3a651bde0..9b3f7fe305 100644
--- a/docs/real_world_fl/application.rst
+++ b/docs/real_world_fl/application.rst
@@ -107,7 +107,7 @@ the client config should have the following in order to configure it as an Execu
Configuration of Executor Tasks is ignored here.
-Please follow :ref:`quickstart` to learn more.
+Please follow :ref:`getting_started` to learn more.
.. _troubleshooting_byoc:
diff --git a/docs/real_world_fl/flare_api.rst b/docs/real_world_fl/flare_api.rst
index 85a6d4a0ea..8f1ca49aaa 100644
--- a/docs/real_world_fl/flare_api.rst
+++ b/docs/real_world_fl/flare_api.rst
@@ -13,7 +13,7 @@ Initialization and Usage
------------------------
Initialize the FLARE API with :func:`new_secure_session<nvflare.fuel.flare_api.flare_api.new_secure_session>` by providing
the username and the path to the startup kit folder of the provisioned user containing the startup folder with the admin client's
-certs and keys::
+certs and keys:
.. code:: python
diff --git a/docs/real_world_fl/migrating_to_flare_api.rst b/docs/real_world_fl/migrating_to_flare_api.rst
index 44c3e9f696..7b34bdcc81 100644
--- a/docs/real_world_fl/migrating_to_flare_api.rst
+++ b/docs/real_world_fl/migrating_to_flare_api.rst
@@ -44,7 +44,7 @@ Initializing the FLAdminAPIRunner, which initializes FLAdminAPI with the values
:ref:`flare_api_initialization` is similar to :class:`FLAdminAPIRunner<nvflare.fuel.hci.client.fl_admin_api_runner.FLAdminAPIRunner>`
with :func:`new_secure_session<nvflare.fuel.flare_api.flare_api.new_secure_session>` taking two required arguments of
the username and the path to the root admin directory containing the startup folder with the admin client's
-certs and keys::
+certs and keys:
.. code:: python
@@ -92,7 +92,7 @@ and the new way with FLARE API.
.. csv-table::
:header: Command for FLAdminAPI,Command for FLARE API,Differences
- :widths: 15, 15, 30
+ :widths: 15, 15, 30, 30
check_status(),get_system_info(),Simplified and reformatted output, see below for details
submit_job(),submit_job(),Simplified output, see below for details
diff --git a/docs/real_world_fl/overview.rst b/docs/real_world_fl/overview.rst
index 707bdc0893..8218c110dd 100644
--- a/docs/real_world_fl/overview.rst
+++ b/docs/real_world_fl/overview.rst
@@ -203,7 +203,7 @@ issue commands to operate the system so it can be run with a script.
For a complete list of admin commands, see :ref:`operating_nvflare`.
-For examples of using the commands to operate a FL system, see the examples in the :ref:`quickstart` section.
+For examples of using the commands to operate a FL system, see the examples in the :ref:`getting_started` section.
****************************************************
Internal folder and file structures for NVIDIA FLARE
diff --git a/docs/resources/Dockerfile.doc b/docs/resources/Dockerfile.doc
new file mode 100644
index 0000000000..44d148aa92
--- /dev/null
+++ b/docs/resources/Dockerfile.doc
@@ -0,0 +1,12 @@
+ARG PYTORCH_IMAGE=nvcr.io/nvidia/pytorch:22.12-py3
+FROM ${PYTORCH_IMAGE}
+
+ARG NVF_VERSION=2.2
+ENV NVF_BRANCH=${NVF_VERSION}
+
+RUN python3 -m pip install -U pip
+RUN python3 -m pip install -U setuptools
+RUN python3 -m pip install nvflare
+
+WORKDIR /workspace/
+RUN git clone https://github.com/NVIDIA/NVFlare.git --branch ${NVF_BRANCH} --single-branch NVFlare
diff --git a/docs/user_guide/dashboard_ui.rst b/docs/user_guide/dashboard_ui.rst
index fc28176349..6006797701 100644
--- a/docs/user_guide/dashboard_ui.rst
+++ b/docs/user_guide/dashboard_ui.rst
@@ -120,8 +120,6 @@ the table, then type the client site name in the input box in the table for the
outside of the input. Click on the value again to edit it. The fields for NUM GPU (number of GPUs) and MEMORY PER GPU (memory per GPU in GiBs) can
also be edited here. When done configuring client sites, click ``Next`` below to complete registration, and you will be linked to the User Dashboard.
-.. _dashboard_org_admin_user_reg:
-
Org Admin User Dashboard
------------------------
The User Dashboard for ``Org Admin`` users is the same as for ``Member`` and ``Lead`` users on the top, but below the user information is an
diff --git a/requirements-doc.txt b/requirements-doc.txt
index fa4e9cc46c..6e053ad73b 100644
--- a/requirements-doc.txt
+++ b/requirements-doc.txt
@@ -2,3 +2,4 @@
sphinx>=4.1.1
sphinx_rtd_theme
recommonmark
+sphinx-copybutton
|
numpy__numpy-3055 | 2to3 run `execfile` fixer
| [
{
"content": "#!/usr/bin/env python\n\"\"\"\nA setup.py script to use setuptools, which gives egg goodness, etc.\n\nThis is used to build installers for OS X through bdist_mpkg.\n\nNotes\n-----\nUsing ``python setupegg.py install`` directly results in file permissions being\nset wrong, with nose refusing to run... | [
{
"content": "#!/usr/bin/env python\n\"\"\"\nA setup.py script to use setuptools, which gives egg goodness, etc.\n\nThis is used to build installers for OS X through bdist_mpkg.\n\nNotes\n-----\nUsing ``python setupegg.py install`` directly results in file permissions being\nset wrong, with nose refusing to run... | diff --git a/setupegg.py b/setupegg.py
index 3ed1e0b0bc70..82b35fd696f6 100755
--- a/setupegg.py
+++ b/setupegg.py
@@ -21,4 +21,4 @@
setupfile = imp.load_source('setupfile', 'setup.py')
setupfile.setup_package()
else:
- execfile('setup.py')
+ exec(compile(open('setup.py').read(), 'setup.py', 'exec'))
|
mozilla__bugbug-3921 | [model:regressor] AttributeError: 'IsotonicRegressionCalibrator' object has no attribute 'n_features_in_'
https://community-tc.services.mozilla.com/tasks/HncpjvKKRcSnxL_GJ8PV9A/runs/0/logs/public/logs/live.log
```
Traceback (most recent call last):
File "/usr/local/bin/bugbug-train", line 8, in <module>
sys.exit(main())
File "/usr/local/lib/python3.10/site-packages/scripts/trainer.py", line 141, in main
retriever.go(args)
File "/usr/local/lib/python3.10/site-packages/scripts/trainer.py", line 41, in go
metrics = model_obj.train(limit=args.limit)
File "/usr/local/lib/python3.10/site-packages/bugbug/model.py", line 418, in train
logger.info("Number of features: %d", self.clf.steps[-1][1].n_features_in_)
AttributeError: 'IsotonicRegressionCalibrator' object has no attribute 'n_features_in_'
```
| [
{
"content": "# -*- coding: utf-8 -*-\n# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this file,\n# You can obtain one at http://mozilla.org/MPL/2.0/.\n\nfrom sklearn.base import BaseEstimator, ClassifierMixin\nfrom sklearn... | [
{
"content": "# -*- coding: utf-8 -*-\n# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this file,\n# You can obtain one at http://mozilla.org/MPL/2.0/.\n\nfrom sklearn.base import BaseEstimator, ClassifierMixin\nfrom sklearn... | diff --git a/bugbug/model_calibration.py b/bugbug/model_calibration.py
index 2d2f2398d3..5643706d29 100644
--- a/bugbug/model_calibration.py
+++ b/bugbug/model_calibration.py
@@ -27,3 +27,7 @@ def predict(self, X):
def predict_proba(self, X):
return self.calibrated_clf.predict_proba(X)
+
+ @property
+ def n_features_in_(self):
+ return self.base_clf.n_features_in_
|
statsmodels__statsmodels-2279 | version number convention pep-440
https://github.com/scipy/scipy/pull/4307
just changes a `-` to `+`
we need the same change
line 228 in setup.py `FULLVERSION += '.dev-' + GIT_REVISION[:7]`
I have no idea if we need anything else
| [
{
"content": "\"\"\"\nMuch of the build system code was adapted from work done by the pandas\ndevelopers [1], which was in turn based on work done in pyzmq [2] and lxml [3].\n\n[1] http://pandas.pydata.org\n[2] http://zeromq.github.io/pyzmq/\n[3] http://lxml.de/\n\"\"\"\n\nimport os\nfrom os.path import relpath... | [
{
"content": "\"\"\"\nMuch of the build system code was adapted from work done by the pandas\ndevelopers [1], which was in turn based on work done in pyzmq [2] and lxml [3].\n\n[1] http://pandas.pydata.org\n[2] http://zeromq.github.io/pyzmq/\n[3] http://lxml.de/\n\"\"\"\n\nimport os\nfrom os.path import relpath... | diff --git a/setup.py b/setup.py
index 9ba11cdc2ec..07e9708cf7e 100644
--- a/setup.py
+++ b/setup.py
@@ -225,7 +225,7 @@ def write_version_py(filename=pjoin(curdir, 'statsmodels/version.py')):
GIT_REVISION = "Unknown"
if not ISRELEASED:
- FULLVERSION += '.dev-' + GIT_REVISION[:7]
+ FULLVERSION += '.dev0+' + GIT_REVISION[:7]
if dowrite:
|
pypa__pipenv-5495 | Include missing package data for Safety
### The issue
#5491
### The fix
Include the missing package data for Safety.
### The checklist
* [ ] Build wheels and test if it is working fine.
<!--
### If this is a patch to the `vendor` directory...
Please try to refrain from submitting patches directly to `vendor` or `patched`, but raise your issue to the upstream project instead, and inform Pipenv to upgrade when the upstream project accepts the fix.
A pull request to upgrade vendor packages is strongly discouraged, unless there is a very good reason (e.g. you need to test Pipenv’s integration to a new vendor feature). Pipenv audits and performs vendor upgrades regularly, generally before a new release is about to drop.
If your patch is not or cannot be accepted by upstream, but is essential to Pipenv (make sure to discuss this with maintainers!), please remember to attach a patch file in `tasks/vendoring/patched`, so this divergence from upstream can be recorded and replayed afterwards.
-->
| [
{
"content": "#!/usr/bin/env python\nimport codecs\nimport os\nimport sys\n\nfrom setuptools import find_packages, setup\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\nwith codecs.open(os.path.join(here, \"README.md\"), encoding=\"utf-8\") as f:\n long_description = \"\\n\" + f.read()\n\nabout = {}\... | [
{
"content": "#!/usr/bin/env python\nimport codecs\nimport os\nimport sys\n\nfrom setuptools import find_packages, setup\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\nwith codecs.open(os.path.join(here, \"README.md\"), encoding=\"utf-8\") as f:\n long_description = \"\\n\" + f.read()\n\nabout = {}\... | diff --git a/setup.py b/setup.py
index 73d52ff3d9..beb4818108 100644
--- a/setup.py
+++ b/setup.py
@@ -67,6 +67,7 @@
"w32.exe",
"w64.exe",
],
+ "pipenv.vendor.ruamel": ["yaml"],
},
python_requires=">=3.7",
zip_safe=True,
|
python-telegram-bot__python-telegram-bot-1485 | Use UTC dates
https://github.com/python-telegram-bot/python-telegram-bot/blob/439790375ed8ed493c43e464aa8e2b60a77939db/telegram/utils/helpers.py#L78-L90
Should probably be using `tz=timezone.utc`. Python's `datetime` isn't the best, and `fromtimestamp` by default sets no `tz` information, which uses the local time, which in turn is generally a bad idea.
| [
{
"content": "#!/usr/bin/env python\n#\n# A library that provides a Python interface to the Telegram Bot API\n# Copyright (C) 2015-2018\n# Leandro Toledo de Souza <devs@python-telegram-bot.org>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser Pub... | [
{
"content": "#!/usr/bin/env python\n#\n# A library that provides a Python interface to the Telegram Bot API\n# Copyright (C) 2015-2018\n# Leandro Toledo de Souza <devs@python-telegram-bot.org>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser Pub... | diff --git a/telegram/utils/helpers.py b/telegram/utils/helpers.py
index 740c19ff1bd..c7697db4239 100644
--- a/telegram/utils/helpers.py
+++ b/telegram/utils/helpers.py
@@ -87,7 +87,7 @@ def from_timestamp(unixtime):
if not unixtime:
return None
- return datetime.fromtimestamp(unixtime)
+ return datetime.utcfromtimestamp(unixtime)
def mention_html(user_id, name):
|
rasterio__rasterio-1305 | Add `GetMetadataItem` like method
Ref: https://github.com/mapbox/rasterio/issues/1077,
I'm proposing to add a new method in https://github.com/mapbox/rasterio/blob/master/rasterio/_base.pyx to replicate GDAL GetMetadataItem
**Method Name:** `get_metadata_item` or `get_metadata`
**Why:** I need to be able to get TIFF metadata like `band.GetMetadataItem('IFD_OFFSET', 'TIFF')`
**Code:**
```cython
def get_metadata(self, bidx, ns, dm=None, ovr=None):
"""Returns metadata item
Parameters
----------
bidx: int
Band index, starting with 1.
name: str
The key for the metadata item to fetch.
domain: str
The domain to fetch for.
ovr: int
Overview level
Returns
-------
str
"""
cdef GDALMajorObjectH b = NULL
cdef GDALMajorObjectH obj = NULL
cdef char *value = NULL
cdef const char *name = NULL
cdef const char *domain = NULL
ns = ns.encode('utf-8')
name = ns
if dm:
dm = dm.encode('utf-8')
domain = dm
b = self.band(bidx)
if ovr:
b = GDALGetOverview(b, ovr)
obj = b
value = GDALGetMetadataItem(obj, name, domain)
if value == NULL:
return None
else:
return value
```
@sgillies I'm happy to submit a PR for that :-)
| [
{
"content": "\"\"\"Errors and Warnings.\"\"\"\n\nfrom click import FileError\n\n\nclass RasterioError(Exception):\n \"\"\"Root exception class\"\"\"\n\n\nclass WindowError(RasterioError):\n \"\"\"Raised when errors occur during window operations\"\"\"\n\n\nclass CRSError(ValueError):\n \"\"\"Raised wh... | [
{
"content": "\"\"\"Errors and Warnings.\"\"\"\n\nfrom click import FileError\n\n\nclass RasterioError(Exception):\n \"\"\"Root exception class\"\"\"\n\n\nclass WindowError(RasterioError):\n \"\"\"Raised when errors occur during window operations\"\"\"\n\n\nclass CRSError(ValueError):\n \"\"\"Raised wh... | diff --git a/rasterio/_base.pyx b/rasterio/_base.pyx
index 4677caf4c..dcc7d6393 100644
--- a/rasterio/_base.pyx
+++ b/rasterio/_base.pyx
@@ -25,8 +25,8 @@ from rasterio.enums import (
ColorInterp, Compression, Interleaving, MaskFlags, PhotometricInterp)
from rasterio.env import Env
from rasterio.errors import (
- RasterioIOError, CRSError, DriverRegistrationError,
- NotGeoreferencedWarning, RasterBlockError)
+ RasterioIOError, CRSError, DriverRegistrationError, NotGeoreferencedWarning,
+ RasterBlockError, BandOverviewError)
from rasterio.profiles import Profile
from rasterio.transform import Affine, guard_transform, tastes_like_gdal
from rasterio.vfs import parse_path, vsi_path
@@ -782,17 +782,72 @@ cdef class DatasetBase(object):
num_items = CSLCount(metadata)
return dict(metadata[i].split('=', 1) for i in range(num_items))
+
+ def get_tag_item(self, ns, dm=None, bidx=0, ovr=None):
+ """Returns tag item value
+
+ Parameters
+ ----------
+ ns: str
+ The key for the metadata item to fetch.
+ dm: str
+ The domain to fetch for.
+ bidx: int
+ Band index, starting with 1.
+ ovr: int
+ Overview level
+
+ Returns
+ -------
+ str
+ """
+ cdef GDALMajorObjectH band = NULL
+ cdef GDALMajorObjectH obj = NULL
+ cdef char *value = NULL
+ cdef const char *name = NULL
+ cdef const char *domain = NULL
+
+ ns = ns.encode('utf-8')
+ name = ns
+
+ if dm:
+ dm = dm.encode('utf-8')
+ domain = dm
+
+ if not bidx > 0 and ovr:
+ raise Exception("Band index (bidx) option needed for overview level")
+
+ if bidx > 0:
+ band = self.band(bidx)
+ else:
+ band = self._hds
+
+ if ovr:
+ obj = GDALGetOverview(band, ovr)
+ if obj == NULL:
+ raise BandOverviewError(
+ "Failed to retrieve overview {}".format(ovr))
+ else:
+ obj = band
+
+ value = GDALGetMetadataItem(obj, name, domain)
+ if value == NULL:
+ return None
+ else:
+ return value
+
+
property colorinterp:
"""Returns a sequence of ``ColorInterp.<enum>`` representing
color interpretation in band order.
-
+
To set color interpretation, provide a sequence of
``ColorInterp.<enum>``:
-
+
import rasterio
from rasterio.enums import ColorInterp
-
+
with rasterio.open('rgba.tif', 'r+') as src:
src.colorinterp = (
ColorInterp.red,
diff --git a/rasterio/errors.py b/rasterio/errors.py
index d2a1c2532..f42cfda18 100644
--- a/rasterio/errors.py
+++ b/rasterio/errors.py
@@ -86,3 +86,7 @@ class RasterioDeprecationWarning(UserWarning):
class RasterBlockError(RasterioError):
"""Raised when raster block access fails"""
+
+
+class BandOverviewError(UserWarning):
+ """Raised when a band overview access fails."""
diff --git a/tests/data/cogeo.tif b/tests/data/cogeo.tif
new file mode 100644
index 000000000..7d40bfb64
Binary files /dev/null and b/tests/data/cogeo.tif differ
diff --git a/tests/test_tag_item.py b/tests/test_tag_item.py
new file mode 100644
index 000000000..d44ea9236
--- /dev/null
+++ b/tests/test_tag_item.py
@@ -0,0 +1,34 @@
+#-*- coding: utf-8 -*-
+import logging
+import sys
+
+import pytest
+
+from .conftest import requires_gdal22
+
+import rasterio
+from rasterio.errors import BandOverviewError
+
+logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
+
+
+def test_get_tag_item():
+ with rasterio.open('tests/data/cogeo.tif') as src:
+ assert src.get_tag_item('INTERLEAVE', 'IMAGE_STRUCTURE') == 'PIXEL'
+
+
+@requires_gdal22(reason="Access to IFD tags requires GDAL 2.2+")
+def test_get_tag_item_Tiff():
+ with rasterio.open('tests/data/cogeo.tif') as src:
+ assert src.get_tag_item('IFD_OFFSET', 'TIFF', bidx=1) == '8'
+ assert src.get_tag_item('IFD_OFFSET', 'TIFF', bidx=1, ovr=1) == '1504'
+ assert not src.get_tag_item('IF', 'TIFF', bidx=1)
+ with pytest.raises(Exception):
+ src.get_tag_item('IFD_OFFSET', 'TIFF', ovr=1)
+
+
+@requires_gdal22(reason="Access to IFD tags requires GDAL 2.2+")
+def test_get_tag_item_noOverview():
+ with rasterio.open('tests/data/rgb3.tif') as src:
+ with pytest.raises(BandOverviewError):
+ src.get_tag_item('IFD_OFFSET', 'TIFF', bidx=1, ovr=1)
|
adamchainz__django-perf-rec-320 | Doesn't capture cache.get_or_set
I would expect that `cache.get_or_set` either be recorded as a `get` and (optionally) a `set`, or perhaps as its own entry. Instead it seems to be ignored completely:
``` python
class TestCache(TestCase):
def test_get(self):
with django_perf_rec.record():
django_cache.get('foo')
def test_set(self):
with django_perf_rec.record():
django_cache.set('foo', 42)
def test_get_or_set(self):
with django_perf_rec.record():
django_cache.get_or_set('foo', 42)
```
This results in
``` yaml
TestCache.test_get:
- cache|get: foo
TestCache.test_get_or_set: []
TestCache.test_set:
- cache|set: foo
```
It looks like this is a result of some code which aims to identify "internal" usages of the cache, though I'm not really sure.
| [
{
"content": "import inspect\nimport re\nimport traceback\nfrom collections.abc import Mapping, Sequence\nfrom functools import wraps\nfrom types import MethodType\n\nfrom django.core.cache import DEFAULT_CACHE_ALIAS, caches\n\nfrom django_perf_rec.operation import AllSourceRecorder, Operation\n\n\nclass CacheO... | [
{
"content": "import inspect\nimport re\nimport traceback\nfrom collections.abc import Mapping, Sequence\nfrom functools import wraps\nfrom types import MethodType\n\nfrom django.core.cache import DEFAULT_CACHE_ALIAS, caches\n\nfrom django_perf_rec.operation import AllSourceRecorder, Operation\n\n\nclass CacheO... | diff --git a/HISTORY.rst b/HISTORY.rst
index 479d3bb9..d15de7d0 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -2,6 +2,11 @@
History
=======
+* Correctly record calls to ``cache.get_or_set()``.
+
+ Thanks to Peter Law for the report in
+ `Issue #319 <https://github.com/adamchainz/django-perf-rec/issues/319>`__.
+
4.9.0 (2020-11-04)
------------------
diff --git a/src/django_perf_rec/cache.py b/src/django_perf_rec/cache.py
index 22257a73..4ab8a0b2 100644
--- a/src/django_perf_rec/cache.py
+++ b/src/django_perf_rec/cache.py
@@ -120,6 +120,7 @@ def __exit__(self, _, __, ___):
"delete_many",
"get",
"get_many",
+ "get_or_set",
"incr",
"set",
"set_many",
diff --git a/tests/test_api.perf.yml b/tests/test_api.perf.yml
index 85282e1c..d64b5f85 100644
--- a/tests/test_api.perf.yml
+++ b/tests/test_api.perf.yml
@@ -1,8 +1,10 @@
RecordTests.test_delete_on_cascade_called_twice:
-- db: 'DELETE FROM "testapp_book" WHERE "testapp_book"."author_id" IN (#)'
-- db: 'DELETE FROM "testapp_award" WHERE "testapp_award"."author_id" IN (#)'
-- db: 'DELETE FROM "testapp_contract_author" WHERE "testapp_contract_author"."author_id" IN (#)'
-- db: 'DELETE FROM "testapp_author" WHERE "testapp_author"."id" IN (#)'
+- db: DELETE FROM "testapp_book" WHERE "testapp_book"."author_id" IN (#)
+- db: DELETE FROM "testapp_award" WHERE "testapp_award"."author_id" IN (#)
+- db: DELETE FROM "testapp_contract_author" WHERE "testapp_contract_author"."author_id" IN (#)
+- db: DELETE FROM "testapp_author" WHERE "testapp_author"."id" IN (#)
+RecordTests.test_get_or_set:
+- cache|get_or_set: foo
RecordTests.test_multiple_cache_ops:
- cache|set: foo
- cache|second|get_many:
diff --git a/tests/test_api.py b/tests/test_api.py
index c0975b3e..232a8147 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -69,6 +69,10 @@ def test_single_cache_op(self):
with record():
caches["default"].get("foo")
+ def test_get_or_set(self):
+ with record():
+ caches["default"].get_or_set("foo", 42)
+
def test_single_cache_op_with_traceback(self):
with pretend_not_under_pytest():
with pytest.raises(AssertionError) as excinfo:
|
jazzband__django-oauth-toolkit-1145 | Release of version 2.0.0
Hello everyone
I would like to know if there is any prediction for the 2.0.0 version to be released. Today it is unrealized in the doc, and has two vulnerability alerts for versions lower than it in the [safety](https://pyup.io/). Thank you!
| [
{
"content": "import django\n\n\n__version__ = \"1.7.1\"\n\nif django.VERSION < (3, 2):\n default_app_config = \"oauth2_provider.apps.DOTConfig\"\n",
"path": "oauth2_provider/__init__.py"
}
] | [
{
"content": "import django\n\n\n__version__ = \"2.0.0\"\n\nif django.VERSION < (3, 2):\n default_app_config = \"oauth2_provider.apps.DOTConfig\"\n",
"path": "oauth2_provider/__init__.py"
}
] | diff --git a/CHANGELOG.md b/CHANGELOG.md
index baae70de8..7819fe616 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -16,7 +16,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [unreleased]
-## [2.0.0] unreleased
+## [2.0.0] 2022-04-24
+
+This is a major release with **BREAKING** changes. Please make sure to review these changes before upgrading:
### Added
* #1106 OIDC: Add "scopes_supported" to the [ConnectDiscoveryInfoView](https://django-oauth-toolkit.readthedocs.io/en/latest/oidc.html#connectdiscoveryinfoview).
@@ -28,8 +30,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
* #1129 (**Breaking**) Changed default value of PKCE_REQUIRED to True. This is a **breaking change**. Clients without
PKCE enabled will fail to authenticate. This breaks with [section 5 of RFC7636](https://datatracker.ietf.org/doc/html/rfc7636)
in favor of the [OAuth2 Security Best Practices for Authorization Code Grants](https://datatracker.ietf.org/doc/html/draft-ietf-oauth-security-topics#section-2.1).
- If you want to retain the pre-2.x behavior, set `PKCE_REQUIRED = False ` in your settings.py
-
+ If you want to retain the pre-2.x behavior, set `PKCE_REQUIRED = False` in your settings.py
* #1093 (**Breaking**) Changed to implement [hashed](https://docs.djangoproject.com/en/stable/topics/auth/passwords/)
client_secret values. This is a **breaking change** that will migrate all your existing
cleartext `application.client_secret` values to be hashed with Django's default password hashing algorithm
@@ -43,7 +44,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Fixed
* #1108 OIDC: Fix `validate_bearer_token()` to properly set `request.scopes` to the list of granted scopes.
-* #1132: Fixed help text for `--skip-authorization` argument of the `createapplication` management command
+* #1132: Fixed help text for `--skip-authorization` argument of the `createapplication` management command.
### Removed
* #1124 (**Breaking**, **Security**) Removes support for insecure `urn:ietf:wg:oauth:2.0:oob` and `urn:ietf:wg:oauth:2.0:oob:auto` which are replaced
diff --git a/oauth2_provider/__init__.py b/oauth2_provider/__init__.py
index 9024b6f63..49a4433da 100644
--- a/oauth2_provider/__init__.py
+++ b/oauth2_provider/__init__.py
@@ -1,7 +1,7 @@
import django
-__version__ = "1.7.1"
+__version__ = "2.0.0"
if django.VERSION < (3, 2):
default_app_config = "oauth2_provider.apps.DOTConfig"
|
frappe__frappe-17020 | Remove Share doesn't disappear
## Description of the issue
When the read Permission of a Share is removed by de-selecting the checkbox, then the corresponding DocShare is removed in the Backend, but the checkbox is automatically re-selected in the frontend. After a refresh, the share
## Context information (for bug reports)
**Output of `bench version`**
```
frappe 14.x.x-develop
```
## Steps to reproduce the issue
1. Open an arbitrary Document
2. Add a share with read permissions
3. Remove the read permission by clicking the checkbox
### Observed result
The checkbox is automatically re-selected
### Expected result
The share entry disappears
## Additional information
| [
{
"content": "# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors\n# License: MIT. See LICENSE\n\nimport frappe\nfrom frappe import _\nfrom frappe.desk.doctype.notification_log.notification_log import (\n\tenqueue_create_notification,\n\tget_title,\n\tget_title_html,\n)\nfrom frappe.desk.form.d... | [
{
"content": "# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors\n# License: MIT. See LICENSE\n\nimport frappe\nfrom frappe import _\nfrom frappe.desk.doctype.notification_log.notification_log import (\n\tenqueue_create_notification,\n\tget_title,\n\tget_title_html,\n)\nfrom frappe.desk.form.d... | diff --git a/frappe/share.py b/frappe/share.py
index 01d1412b8d8f..3edcb1be38c8 100644
--- a/frappe/share.py
+++ b/frappe/share.py
@@ -93,7 +93,7 @@ def set_permission(doctype, name, user, permission_to, value=1, everyone=0):
if not (share.read or share.write or share.submit or share.share):
share.delete()
- share = {}
+ share = None
return share
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.