| instance_id (string, length 10–57) | file_changes (list, length 1–15) | repo (string, length 7–53) | base_commit (string, length 40) | problem_statement (string, length 11–52.5k) | patch (string, length 251–7.06M) |
|---|---|---|---|---|---|
0b01001001__spectree-64
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": null,
"edited_modules": null
},
"file": "setup.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"spectree/utils.py:parse_params"
],
"edited_modules": [
"spectree/utils.py:parse_params"
]
},
"file": "spectree/utils.py"
}
] |
0b01001001/spectree
|
a091fab020ac26548250c907bae0855273a98778
|
[BUG]description for query paramters can not show in swagger ui
Hi, when I add a description for a schema used in query, it can not show in swagger ui but can show in Redoc
```py
@HELLO.route('/', methods=['GET'])
@api.validate(query=HelloForm)
def hello():
"""
hello 注释
:return:
"""
return 'ok'
class HelloForm(BaseModel):
"""
hello表单
"""
user: str # 用户名称
msg: str = Field(description='msg test', example='aa')
index: int
data: HelloGetListForm
list: List[HelloListForm]
```


|
diff --git a/setup.py b/setup.py
index 1b3cb64..4ef21e6 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@ with open(path.join(here, 'requirements.txt'), encoding='utf-8') as f:
setup(
name='spectree',
- version='0.3.7',
+ version='0.3.8',
author='Keming Yang',
author_email='kemingy94@gmail.com',
description=('generate OpenAPI document and validate request&response '
diff --git a/spectree/utils.py b/spectree/utils.py
index bb5698d..73d6c71 100644
--- a/spectree/utils.py
+++ b/spectree/utils.py
@@ -54,6 +54,7 @@ def parse_params(func, params, models):
'in': 'query',
'schema': schema,
'required': name in query.get('required', []),
+ 'description': schema.get('description', ''),
})
if hasattr(func, 'headers'):
@@ -64,6 +65,7 @@ def parse_params(func, params, models):
'in': 'header',
'schema': schema,
'required': name in headers.get('required', []),
+ 'description': schema.get('description', ''),
})
if hasattr(func, 'cookies'):
@@ -74,6 +76,7 @@ def parse_params(func, params, models):
'in': 'cookie',
'schema': schema,
'required': name in cookies.get('required', []),
+ 'description': schema.get('description', ''),
})
return params
|
12rambau__sepal_ui-411
|
[
{
"changes": {
"added_entities": [
"sepal_ui/sepalwidgets/inputs.py:DatePicker.disable"
],
"added_modules": null,
"edited_entities": null,
"edited_modules": [
"sepal_ui/sepalwidgets/inputs.py:DatePicker"
]
},
"file": "sepal_ui/sepalwidgets/inputs.py"
}
] |
12rambau/sepal_ui
|
179bd8d089275c54e94a7614be7ed03d298ef532
|
add a disabled trait on the datepicker
I'm currently coding it in a module and the process of disabling a datepicker is uterly boring. I think we could add an extra trait to the layout and pilot the enabling and disabling directly from the built-in widget
```python
self.w_start = sw.DatePicker(label="start", v_model=None)
# disable both the slots (hidden to everyone) and the menu
self.w_start.menu.v_slots[0]["children"].disabled = True
self.w_start.menu.disabled = True
```
|
diff --git a/docs/source/modules/sepal_ui.sepalwidgets.DatePicker.rst b/docs/source/modules/sepal_ui.sepalwidgets.DatePicker.rst
index 1a982afb..867227cb 100644
--- a/docs/source/modules/sepal_ui.sepalwidgets.DatePicker.rst
+++ b/docs/source/modules/sepal_ui.sepalwidgets.DatePicker.rst
@@ -8,6 +8,7 @@ sepal\_ui.sepalwidgets.DatePicker
.. autosummary::
~DatePicker.menu
+ ~DatePicker.disabled
.. rubric:: Methods
@@ -15,5 +16,8 @@ sepal\_ui.sepalwidgets.DatePicker
:nosignatures:
~Datepicker.close_menu
+ ~DatePicker.disable
-.. automethod:: sepal_ui.sepalwidgets.DatePicker.close_menu
\ No newline at end of file
+.. automethod:: sepal_ui.sepalwidgets.DatePicker.close_menu
+
+.. automethod:: sepal_ui.sepalwidgets.DatePicker.disable
\ No newline at end of file
diff --git a/sepal_ui/sepalwidgets/inputs.py b/sepal_ui/sepalwidgets/inputs.py
index 3ad7f1a9..68b81746 100644
--- a/sepal_ui/sepalwidgets/inputs.py
+++ b/sepal_ui/sepalwidgets/inputs.py
@@ -1,7 +1,7 @@
from pathlib import Path
import ipyvuetify as v
-from traitlets import link, Int, Any, List, observe, Dict, Unicode
+from traitlets import link, Int, Any, List, observe, Dict, Unicode, Bool
from ipywidgets import jslink
import pandas as pd
import ee
@@ -40,6 +40,9 @@ class DatePicker(v.Layout, SepalWidget):
menu = None
"v.Menu: the menu widget to display the datepicker"
+ disabled = Bool(False).tag(sync=True)
+ "traitlets.Bool: the disabled status of the Datepicker object"
+
def __init__(self, label="Date", **kwargs):
# create the widgets
@@ -93,6 +96,14 @@ class DatePicker(v.Layout, SepalWidget):
return
+ @observe("disabled")
+ def disable(self, change):
+ """A method to disabled the appropriate components in the datipkcer object"""
+
+ self.menu.v_slots[0]["children"].disabled = self.disabled
+
+ return
+
class FileInput(v.Flex, SepalWidget):
"""
|
12rambau__sepal_ui-416
|
[
{
"changes": {
"added_entities": [
"sepal_ui/sepalwidgets/app.py:DrawerItem.add_notif",
"sepal_ui/sepalwidgets/app.py:DrawerItem.remove_notif"
],
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/app.py:DrawerItem.__init__",
"sepal_ui/sepalwidgets/app.py:DrawerItem._on_click"
],
"edited_modules": [
"sepal_ui/sepalwidgets/app.py:DrawerItem"
]
},
"file": "sepal_ui/sepalwidgets/app.py"
}
] |
12rambau/sepal_ui
|
8b76805db051d6d15024bd9ec2d78502cd92132e
|
Interact with navigation drawers
Sometimes is useful to pass some data from the module model to the app environment and so far we do not have this implementation.
We can add two simple methods to the drawers so they can update their state with icons, badges, and so.
|
diff --git a/docs/source/modules/sepal_ui.sepalwidgets.DrawerItem.rst b/docs/source/modules/sepal_ui.sepalwidgets.DrawerItem.rst
index a3280cd3..22b87b44 100644
--- a/docs/source/modules/sepal_ui.sepalwidgets.DrawerItem.rst
+++ b/docs/source/modules/sepal_ui.sepalwidgets.DrawerItem.rst
@@ -7,7 +7,9 @@ sepal\_ui.sepalwidgets.DrawerItem
.. autosummary::
- ~DrawerItem.rt
+ ~DrawerItem.rt
+ ~DrawerItem.alert
+ ~DrawerItem.alert_badge
.. rubric:: Methods
@@ -15,5 +17,11 @@ sepal\_ui.sepalwidgets.DrawerItem
:nosignatures:
~DrawerItem.display_tile
+ ~DrawerItem.add_notif
+ ~DrawerItem.remove_notif
-.. automethod:: sepal_ui.sepalwidgets.DrawerItem.display_tile
\ No newline at end of file
+.. automethod:: sepal_ui.sepalwidgets.DrawerItem.display_tile
+
+.. automethod:: sepal_ui.sepalwidgets.DrawerItem.add_notif
+
+.. automethod:: sepal_ui.sepalwidgets.DrawerItem.remove_notif
\ No newline at end of file
diff --git a/sepal_ui/sepalwidgets/app.py b/sepal_ui/sepalwidgets/app.py
index a1aff843..2a87de83 100644
--- a/sepal_ui/sepalwidgets/app.py
+++ b/sepal_ui/sepalwidgets/app.py
@@ -1,3 +1,4 @@
+from traitlets import link, Bool, observe
from functools import partial
from datetime import datetime
@@ -73,12 +74,29 @@ class DrawerItem(v.ListItem, SepalWidget):
card (str, optional): the mount_id of tiles in the app
href (str, optional): the absolute link to an external web page
kwargs (optional): any parameter from a v.ListItem. If set, '_metadata', 'target', 'link' and 'children' will be overwritten.
+ model (optional): sepalwidget model where is defined the bin_var trait
+ bind_var (optional): required when model is selected. Trait to link with 'alert' self trait parameter
"""
rt = None
"sw.ResizeTrigger: the trigger to resize maps and other javascript object when jumping from a tile to another"
- def __init__(self, title, icon=None, card=None, href=None, **kwargs):
+ alert = Bool(False).tag(sync=True)
+ "Bool: trait to control visibility of an alert in the drawer item"
+
+ alert_badge = None
+ "v.ListItemAction: red circle to display in the drawer"
+
+ def __init__(
+ self,
+ title,
+ icon=None,
+ card=None,
+ href=None,
+ model=None,
+ bind_var=None,
+ **kwargs
+ ):
# set the resizetrigger
self.rt = js.rt
@@ -108,6 +126,45 @@ class DrawerItem(v.ListItem, SepalWidget):
# call the constructor
super().__init__(**kwargs)
+ # cannot be set as a class member because it will be shared with all
+ # the other draweritems.
+ self.alert_badge = v.ListItemAction(
+ children=[v.Icon(children=["fas fa-circle"], x_small=True, color="red")]
+ )
+
+ if model:
+ if not bind_var:
+ raise Exception(
+ "You have selected a model, you need a trait to bind with drawer."
+ )
+
+ link((model, bind_var), (self, "alert"))
+
+ @observe("alert")
+ def add_notif(self, change):
+ """Add a notification alert to drawer"""
+
+ if change["new"]:
+ if self.alert_badge not in self.children:
+ new_children = self.children[:]
+ new_children.append(self.alert_badge)
+ self.children = new_children
+ else:
+ self.remove_notif()
+
+ return
+
+ def remove_notif(self):
+ """Remove notification alert"""
+
+ if self.alert_badge in self.children:
+ new_children = self.children[:]
+ new_children.remove(self.alert_badge)
+
+ self.children = new_children
+
+ return
+
def display_tile(self, tiles):
"""
Display the apropriate tiles when the item is clicked.
@@ -138,6 +195,9 @@ class DrawerItem(v.ListItem, SepalWidget):
# change the current item status
self.input_value = True
+ # Remove notification
+ self.remove_notif()
+
return self
|
12rambau__sepal_ui-418
|
[
{
"changes": {
"added_entities": [
"sepal_ui/sepalwidgets/inputs.py:DatePicker.check_date",
"sepal_ui/sepalwidgets/inputs.py:DatePicker.is_valid_date"
],
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/inputs.py:DatePicker.__init__"
],
"edited_modules": [
"sepal_ui/sepalwidgets/inputs.py:DatePicker"
]
},
"file": "sepal_ui/sepalwidgets/inputs.py"
}
] |
12rambau/sepal_ui
|
8b76805db051d6d15024bd9ec2d78502cd92132e
|
Can't instantiate a sw.DatePicker with initial v_model
Is not possible to instantiate the sepal DatePicker with an initially given date through the `v_model` parameter
|
diff --git a/docs/source/modules/sepal_ui.sepalwidgets.DatePicker.rst b/docs/source/modules/sepal_ui.sepalwidgets.DatePicker.rst
index 867227cb..322cca23 100644
--- a/docs/source/modules/sepal_ui.sepalwidgets.DatePicker.rst
+++ b/docs/source/modules/sepal_ui.sepalwidgets.DatePicker.rst
@@ -9,6 +9,7 @@ sepal\_ui.sepalwidgets.DatePicker
~DatePicker.menu
~DatePicker.disabled
+ ~DatePicker.date_text
.. rubric:: Methods
@@ -17,7 +18,13 @@ sepal\_ui.sepalwidgets.DatePicker
~Datepicker.close_menu
~DatePicker.disable
+ ~DatePicker.is_valid_date
+ ~DatePicker.check_date
.. automethod:: sepal_ui.sepalwidgets.DatePicker.close_menu
-.. automethod:: sepal_ui.sepalwidgets.DatePicker.disable
\ No newline at end of file
+.. automethod:: sepal_ui.sepalwidgets.DatePicker.disable
+
+.. automethod:: sepal_ui.sepalwidgets.DatePicker.check_date
+
+.. autofunction:: sepal_ui.sepalwidgets.DatePicker.disable
\ No newline at end of file
diff --git a/sepal_ui/sepalwidgets/inputs.py b/sepal_ui/sepalwidgets/inputs.py
index 7d003229..bf1adf0b 100644
--- a/sepal_ui/sepalwidgets/inputs.py
+++ b/sepal_ui/sepalwidgets/inputs.py
@@ -1,4 +1,5 @@
from pathlib import Path
+from datetime import datetime
import ipyvuetify as v
from traitlets import link, Int, Any, List, observe, Dict, Unicode, Bool
@@ -40,6 +41,9 @@ class DatePicker(v.Layout, SepalWidget):
menu = None
"v.Menu: the menu widget to display the datepicker"
+ date_text = None
+ "v.TextField: the text field of the datepicker widget"
+
disabled = Bool(False).tag(sync=True)
"traitlets.Bool: the disabled status of the Datepicker object"
@@ -48,7 +52,7 @@ class DatePicker(v.Layout, SepalWidget):
# create the widgets
date_picker = v.DatePicker(no_title=True, v_model=None, scrollable=True)
- date_text = v.TextField(
+ self.date_text = v.TextField(
v_model=None,
label=label,
hint="YYYY-MM-DD format",
@@ -69,7 +73,7 @@ class DatePicker(v.Layout, SepalWidget):
{
"name": "activator",
"variable": "menuData",
- "children": date_text,
+ "children": self.date_text,
}
],
)
@@ -84,8 +88,28 @@ class DatePicker(v.Layout, SepalWidget):
# call the constructor
super().__init__(**kwargs)
- jslink((date_picker, "v_model"), (date_text, "v_model"))
- jslink((date_picker, "v_model"), (self, "v_model"))
+ jslink((date_picker, "v_model"), (self.date_text, "v_model"))
+ jslink((self, "v_model"), (date_picker, "v_model"))
+
+ @observe("v_model")
+ def check_date(self, change):
+ """
+ A method to check if the value of the set v_model is a correctly formated date
+ Reset the widget and display an error if it's not the case
+ """
+
+ self.date_text.error_messages = None
+
+ # exit immediately if nothing is set
+ if change["new"] is None:
+ return
+
+ # change the error status
+ if not self.is_valid_date(change["new"]):
+ msg = self.date_text.hint
+ self.date_text.error_messages = msg
+
+ return
@observe("v_model")
def close_menu(self, change):
@@ -104,6 +128,27 @@ class DatePicker(v.Layout, SepalWidget):
return
+ @staticmethod
+ def is_valid_date(date):
+ """
+ Check if the date is provided using the date format required for the widget
+
+ Args:
+ date (str): the date to test in YYYY-MM-DD format
+
+ Return:
+ (bool): the date to test
+ """
+
+ try:
+ date = datetime.strptime(date, "%Y-%m-%d")
+ valid = True
+
+ except Exception:
+ valid = False
+
+ return valid
+
class FileInput(v.Flex, SepalWidget):
"""
|
12rambau__sepal_ui-459
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/app.py:NavDrawer.__init__"
],
"edited_modules": [
"sepal_ui/sepalwidgets/app.py:NavDrawer"
]
},
"file": "sepal_ui/sepalwidgets/app.py"
},
{
"changes": {
"added_entities": [
"sepal_ui/translator/translator.py:Translator.delete_empty"
],
"added_modules": null,
"edited_entities": [
"sepal_ui/translator/translator.py:Translator.merge_dict"
],
"edited_modules": [
"sepal_ui/translator/translator.py:Translator"
]
},
"file": "sepal_ui/translator/translator.py"
}
] |
12rambau/sepal_ui
|
a4b3091755a11ef31a3714858007a93b750b6a79
|
crowdin untranslated keys are marked as empty string
These string are interpreted as "something" by the translator leading to empty strings everywhere in the build-in component.
They should be ignored
|
diff --git a/docs/source/modules/sepal_ui.translator.Translator.rst b/docs/source/modules/sepal_ui.translator.Translator.rst
index 60fa976c..642a3ab6 100644
--- a/docs/source/modules/sepal_ui.translator.Translator.rst
+++ b/docs/source/modules/sepal_ui.translator.Translator.rst
@@ -27,6 +27,7 @@ sepal\_ui.translator.Translator
~Translator.find_target
~Translator.available_locales
~Translator.merge_dict
+ ~Translator.delete_empty
.. automethod:: sepal_ui.translator.Translator.missing_keys
@@ -38,6 +39,8 @@ sepal\_ui.translator.Translator
.. automethod:: sepal_ui.translator.Translator.merge_dict
+.. automethod:: sepal_ui.translator.Translator.delete_empty
+
.. autofunction:: sepal_ui.translator.Translator.find_target
\ No newline at end of file
diff --git a/sepal_ui/message/en/locale.json b/sepal_ui/message/en/locale.json
index 632249f8..22b77234 100644
--- a/sepal_ui/message/en/locale.json
+++ b/sepal_ui/message/en/locale.json
@@ -2,11 +2,6 @@
"test_key": "Test key",
"status": "Status: {}",
"widgets": {
- "navdrawer": {
- "code": "Source code",
- "wiki": "Wiki",
- "bug": "Bug report"
- },
"asset_select": {
"types": {
"0": "Raster",
diff --git a/sepal_ui/sepalwidgets/app.py b/sepal_ui/sepalwidgets/app.py
index b004b9ee..df1e81d8 100644
--- a/sepal_ui/sepalwidgets/app.py
+++ b/sepal_ui/sepalwidgets/app.py
@@ -255,19 +255,13 @@ class NavDrawer(v.NavigationDrawer, SepalWidget):
code_link = []
if code:
- item_code = DrawerItem(
- ms.widgets.navdrawer.code, icon="far fa-file-code", href=code
- )
+ item_code = DrawerItem("Source code", icon="far fa-file-code", href=code)
code_link.append(item_code)
if wiki:
- item_wiki = DrawerItem(
- ms.widgets.navdrawer.wiki, icon="fas fa-book-open", href=wiki
- )
+ item_wiki = DrawerItem("Wiki", icon="fas fa-book-open", href=wiki)
code_link.append(item_wiki)
if issue:
- item_bug = DrawerItem(
- ms.widgets.navdrawer.bug, icon="fas fa-bug", href=issue
- )
+ item_bug = DrawerItem("Bug report", icon="fas fa-bug", href=issue)
code_link.append(item_bug)
children = [
diff --git a/sepal_ui/translator/translator.py b/sepal_ui/translator/translator.py
index f0a39f77..f4fdec47 100644
--- a/sepal_ui/translator/translator.py
+++ b/sepal_ui/translator/translator.py
@@ -166,7 +166,8 @@ class Translator(SimpleNamespace):
Identify numbered dictionnaries embeded in the dict and transform them into lists
This function is an helper to prevent deprecation after the introduction of pontoon for translation.
- The user is now force to use keys even for numbered lists. SimpleNamespace doesn't support integer indexing so this function will transform back this "numbered" dictionnary (with integer keys) into lists.
+ The user is now force to use keys even for numbered lists. SimpleNamespace doesn't support integer indexing
+ so this function will transform back this "numbered" dictionnary (with integer keys) into lists.
Args:
d (dict): the dictionnary to sanitize
@@ -252,7 +253,8 @@ class Translator(SimpleNamespace):
"""
gather all the .json file in the provided l10n folder as 1 single json dict
- the json dict will be sanitysed and the key will be used as if they were coming from 1 single file. be careful with duplication
+ the json dict will be sanitysed and the key will be used as if they were coming from 1 single file.
+ be careful with duplication. empty string keys will be removed.
Args:
folder (pathlib.path)
@@ -264,6 +266,29 @@ class Translator(SimpleNamespace):
final_json = {}
for f in folder.glob("*.json"):
- final_json = {**final_json, **cls.sanitize(json.loads(f.read_text()))}
+ tmp_dict = cls.delete_empty(json.loads(f.read_text()))
+ final_json = {**final_json, **cls.sanitize(tmp_dict)}
return final_json
+
+ @versionadded(version="2.8.1")
+ @classmethod
+ def delete_empty(cls, d):
+ """
+ Remove empty strings ("") recursively from the dictionaries. This is to prevent untranslated strings from
+ Crowdin to be uploaded. The dictionnary must only embed dictionnaries and no lists.
+
+ Args:
+ d (dict): the dictionnary to sanitize
+
+ Return:
+ (dict): the sanitized dictionnary
+
+ """
+ for k, v in list(d.items()):
+ if isinstance(v, dict):
+ cls.delete_empty(v)
+ elif v == "":
+ del d[k]
+
+ return d
|
12rambau__sepal_ui-501
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/app.py:LocaleSelect.__init__"
],
"edited_modules": [
"sepal_ui/sepalwidgets/app.py:LocaleSelect"
]
},
"file": "sepal_ui/sepalwidgets/app.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/translator/translator.py:Translator.__init__",
"sepal_ui/translator/translator.py:Translator.search_key",
"sepal_ui/translator/translator.py:Translator.missing_keys",
"sepal_ui/translator/translator.py:Translator.available_locales"
],
"edited_modules": [
"sepal_ui/translator/translator.py:Translator"
]
},
"file": "sepal_ui/translator/translator.py"
}
] |
12rambau/sepal_ui
|
7eb3f48735e1cfeac75fecf88dd8194c8daea3d3
|
use box for the translator ?
I discovered this lib while working on the geemap drop.
I think it could be super handy for the translator keys and maybe faster. https://github.com/cdgriffith/Box
side note: we will need it anyway for the geemap drop
|
diff --git a/docs/source/modules/sepal_ui.translator.Translator.rst b/docs/source/modules/sepal_ui.translator.Translator.rst
index 642a3ab6..7f11e39f 100644
--- a/docs/source/modules/sepal_ui.translator.Translator.rst
+++ b/docs/source/modules/sepal_ui.translator.Translator.rst
@@ -2,19 +2,6 @@ sepal\_ui.translator.Translator
===============================
.. autoclass:: sepal_ui.translator.Translator
-
- .. rubric:: Attributes
-
- .. autosummary::
-
- ~Translator.default_dict
- ~Translator.target_dict
- ~Translator.default
- ~Translator.target
- ~Translator.targeted
- ~Translator.match
- ~Translator.keys
- ~Translator.folder
.. rubric:: Methods
@@ -33,7 +20,7 @@ sepal\_ui.translator.Translator
.. automethod:: sepal_ui.translator.Translator.sanitize
-.. automethod:: sepal_ui.translator.Translator.search_key
+.. autofunction:: sepal_ui.translator.Translator.search_key
.. automethod:: sepal_ui.translator.Translator.available_locales
diff --git a/sepal_ui/sepalwidgets/app.py b/sepal_ui/sepalwidgets/app.py
index 96c10461..bfd59e3d 100644
--- a/sepal_ui/sepalwidgets/app.py
+++ b/sepal_ui/sepalwidgets/app.py
@@ -602,7 +602,7 @@ class LocaleSelect(v.Menu, SepalWidget):
# extract the language information from the translator
# if not set default to english
- code = "en" if translator is None else translator.target
+ code = "en" if translator is None else translator._target
loc = self.COUNTRIES[self.COUNTRIES.code == code].squeeze()
attr = {**self.ATTR, "src": self.FLAG.format(loc.flag), "alt": loc.name}
diff --git a/sepal_ui/translator/translator.py b/sepal_ui/translator/translator.py
index f4fdec47..efa29bc9 100644
--- a/sepal_ui/translator/translator.py
+++ b/sepal_ui/translator/translator.py
@@ -1,21 +1,26 @@
import json
-from types import SimpleNamespace
from pathlib import Path
from collections import abc
-from deepdiff import DeepDiff
from configparser import ConfigParser
-from deprecated.sphinx import versionadded
+from deprecated.sphinx import versionadded, deprecated
+from box import Box
from sepal_ui import config_file
-class Translator(SimpleNamespace):
+class Translator(Box):
"""
- The translator is a SimpleNamespace of Simplenamespace. It reads 2 Json files, the first one being the source language (usually English) and the second one the target language.
+ The translator is a Python Box of boxes. It reads 2 Json files, the first one being the source language (usually English) and the second one the target language.
It will replace in the source dictionary every key that exist in both json dictionaries. Following this procedure, every message that is not translated can still be accessed in the source language.
To access the dictionary keys, instead of using [], you can simply use key name as in an object ex: translator.first_key.secondary_key.
There are no depth limits, just respect the snake_case convention when naming your keys in the .json files.
+ 5 internal keys are created upon initialization (there name cannot be used as keys in the translation message):
+ - (str) _default : the default locale of the translator
+ - (str) _targeted : the initially requested language. Use to display debug information to the user agent
+ - (str) _target : the target locale of the translator
+ - (bool) _match : if the target language match the one requested one by user, used to trigger information in appBar
+ - (str) _folder : the path to the l10n folder
Args:
json_folder (str | pathlib.Path): The folder where the dictionaries are stored
@@ -23,75 +28,60 @@ class Translator(SimpleNamespace):
default (str, optional): The language code (IETF BCP 47) of the source lang. default to "en" (it should be the same as the source dictionary)
"""
- FORBIDDEN_KEYS = [
- "default_dict",
- "target_dict",
- "in",
- "class",
- "default",
- "target",
- "match",
- ]
- "list(str): list of the forbidden keys, using one of them in a translation dict will throw an error"
-
- target_dict = {}
- "(dict): the target language dictionary"
-
- default_dict = {}
- "dict: the source language dictionary"
-
- default = None
- "str: the default locale of the translator"
-
- targeted = None
- "str: the initially requested language. Use to display debug information to the user agent"
-
- target = None
- "str: the target locale of the translator"
-
- match = None
- "bool: if the target language match the one requested one by user, used to trigger information in appBar"
-
- keys = None
- "all the keys can be acceced as attributes"
-
- folder = None
- "pathlib.Path: the path to the l10n folder"
+ _protected_keys = [
+ "find_target",
+ "search_key",
+ "sanitize",
+ "_update",
+ "missing_keys",
+ "available_locales",
+ "merge_dict",
+ "delete_empty",
+ ] + dir(Box)
+ "keys that cannot be used as var names as they are protected for methods"
def __init__(self, json_folder, target=None, default="en"):
- # init the simple namespace
- super().__init__()
+ # the name of the 5 variables that cannot be used as init keys
+ FORBIDDEN_KEYS = ["_folder", "_default", "_target", "_targeted", "_match"]
- # force cast to path
- self.folder = Path(json_folder)
+ # init the box with the folder
+ folder = Path(json_folder)
# reading the default dict
- self.default = default
- self.default_dict = self.merge_dict(self.folder / default)
+ default_dict = self.merge_dict(folder / default)
# create a dictionary in the target language
- self.targeted, target = self.find_target(self.folder, target)
- self.target = target or default
- self.target_dict = self.merge_dict(self.folder / self.target)
+ targeted, target = self.find_target(folder, target)
+ target = target or default
+ target_dict = self.merge_dict(folder / target)
# evaluate the matching of requested and obtained values
- self.match = self.targeted == self.target
+ match = targeted == target
# create the composite dictionary
- ms_dict = self._update(self.default_dict, self.target_dict)
+ ms_dict = self._update(default_dict, target_dict)
# check if forbidden keys are being used
- [self.search_key(ms_dict, k) for k in self.FORBIDDEN_KEYS]
+ # this will raise an error if any
+ [self.search_key(ms_dict, k) for k in FORBIDDEN_KEYS]
- # transform it into a json str
+ # # unpack the json as a simple namespace
ms_json = json.dumps(ms_dict)
+ ms_boxes = json.loads(ms_json, object_hook=lambda d: Box(**d, frozen_box=True))
- # unpack the json as a simple namespace
- ms = json.loads(ms_json, object_hook=lambda d: SimpleNamespace(**d))
+ private_keys = {
+ "_folder": str(folder),
+ "_default": default,
+ "_targeted": targeted,
+ "_target": target,
+ "_match": match,
+ }
- for k, v in ms.__dict__.items():
- setattr(self, k, getattr(ms, k))
+ # the final box is not frozen
+ # waiting for an answer here: https://github.com/cdgriffith/Box/issues/223
+ # it the meantime it's easy to call the translator using a frozen_box argument
+ super(Box, self).__init__(**private_keys, **ms_boxes)
@versionadded(version="2.7.0")
@staticmethod
@@ -139,8 +129,8 @@ class Translator(SimpleNamespace):
return (target, lang)
- @classmethod
- def search_key(cls, d, key):
+ @staticmethod
+ def search_key(d, key):
"""
Search a specific key in the d dictionary and raise an error if found
@@ -149,14 +139,9 @@ class Translator(SimpleNamespace):
key (str): the key to look for
"""
- for k, v in d.items():
- if isinstance(v, abc.Mapping):
- cls.search_key(v, key)
- else:
- if k == key:
- raise Exception(
- f"You cannot use the key {key} in your translation dictionary"
- )
+ if key in d:
+ msg = f"You cannot use the key {key} in your translation dictionary"
+ raise Exception(msg)
return
@@ -218,34 +203,19 @@ class Translator(SimpleNamespace):
return ms
+ @deprecated(version="2.9.0", reason="Not needed with automatic translators")
def missing_keys(self):
- """
- this function is intended for developer use only
- print the list of the missing keys in the target dictionnairie
-
- Return:
- (str): the list of missing keys
- """
-
- # find all the missing keys
- try:
- ddiff = DeepDiff(self.default_dict, self.target_dict)[
- "dictionary_item_removed"
- ]
- except Exception:
- ddiff = ["All messages are translated"]
-
- return "\n".join(ddiff)
+ pass
def available_locales(self):
"""
Return the available locales in the l10n folder
Return:
- (list): the lilst of str codes
+ (list): the list of str codes
"""
- return [f.name for f in self.folder.iterdir() if f.is_dir()]
+ return [f.name for f in Path(self._folder).glob("[!^._]*") if f.is_dir()]
@versionadded(version="2.7.0")
@classmethod
|
12rambau__sepal_ui-516
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": null,
"edited_modules": [
"sepal_ui/aoi/aoi_model.py:AoiModel"
]
},
"file": "sepal_ui/aoi/aoi_model.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": null,
"edited_modules": null
},
"file": "sepal_ui/mapping/sepal_map.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": null,
"edited_modules": null
},
"file": "sepal_ui/mapping/value_inspector.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/tile.py:Tile.nest"
],
"edited_modules": [
"sepal_ui/sepalwidgets/tile.py:Tile"
]
},
"file": "sepal_ui/sepalwidgets/tile.py"
}
] |
12rambau/sepal_ui
|
9c319b0c21b8b1ba75173f3f85fd184747c398de
|
deprecate zip_dir
https://github.com/12rambau/sepal_ui/blob/a9255e7c566aac31ee7f8303e74fb7e8a3d57e5f/sepal_ui/aoi/aoi_model.py#L64
This folder is created on AOI call but is not used anymore as we are using the tmp module to create the tmp directory.
|
diff --git a/docs/source/modules/sepal_ui.aoi.AoiModel.rst b/docs/source/modules/sepal_ui.aoi.AoiModel.rst
index 0f5b8f1a..ccdcab52 100644
--- a/docs/source/modules/sepal_ui.aoi.AoiModel.rst
+++ b/docs/source/modules/sepal_ui.aoi.AoiModel.rst
@@ -12,7 +12,6 @@ sepal\_ui.aoi.AoiModel
~AoiModel.NAME
~AoiModel.ISO
~AoiModel.GADM_BASE_URL
- ~AoiModel.GADM_ZIP_DIR
~AoiModel.GAUL_ASSET
~AoiModel.ASSET_SUFFIX
~AoiModel.CUSTOM
diff --git a/sepal_ui/aoi/aoi_model.py b/sepal_ui/aoi/aoi_model.py
index ad2a72fb..40f9b4e6 100644
--- a/sepal_ui/aoi/aoi_model.py
+++ b/sepal_ui/aoi/aoi_model.py
@@ -61,11 +61,6 @@ class AoiModel(Model):
GADM_BASE_URL = "https://biogeo.ucdavis.edu/data/gadm3.6/gpkg/gadm36_{}_gpkg.zip"
"str: the base url to download gadm maps"
- GADM_ZIP_DIR = Path.home() / "tmp" / "GADM_zip"
- "pathlib.Path: the zip dir where we download the zips"
-
- GADM_ZIP_DIR.mkdir(parents=True, exist_ok=True)
-
GAUL_ASSET = "FAO/GAUL/2015/level{}"
"str: the GAUL asset name"
diff --git a/sepal_ui/mapping/sepal_map.py b/sepal_ui/mapping/sepal_map.py
index 6a518ccc..6ca115fc 100644
--- a/sepal_ui/mapping/sepal_map.py
+++ b/sepal_ui/mapping/sepal_map.py
@@ -16,7 +16,7 @@ import random
from haversine import haversine
import numpy as np
import rioxarray
-import xarray_leaflet
+import xarray_leaflet # noqa: F401
import matplotlib.pyplot as plt
from matplotlib import colors as mpc
from matplotlib import colorbar
@@ -38,11 +38,6 @@ from sepal_ui.mapping.basemaps import basemap_tiles
__all__ = ["SepalMap"]
-# call x_array leaflet at least once
-# flake8 will complain as it's a pluggin (i.e. never called)
-# We don't want to ignore testing F401
-xarray_leaflet
-
class SepalMap(ipl.Map):
"""
diff --git a/sepal_ui/mapping/value_inspector.py b/sepal_ui/mapping/value_inspector.py
index f848018f..783d68ad 100644
--- a/sepal_ui/mapping/value_inspector.py
+++ b/sepal_ui/mapping/value_inspector.py
@@ -3,7 +3,7 @@ import ee
import geopandas as gpd
from shapely import geometry as sg
import rioxarray
-import xarray_leaflet
+import xarray_leaflet # noqa: F401
from rasterio.crs import CRS
import rasterio as rio
import ipyvuetify as v
@@ -16,11 +16,6 @@ from sepal_ui.mapping.map_btn import MapBtn
from sepal_ui.frontend.styles import COMPONENTS
from sepal_ui.message import ms
-# call x_array leaflet at least once
-# flake8 will complain as it's a pluggin (i.e. never called)
-# We don't want to ignore testing F401
-xarray_leaflet
-
class ValueInspector(WidgetControl):
"""
diff --git a/sepal_ui/sepalwidgets/tile.py b/sepal_ui/sepalwidgets/tile.py
index dec40168..69a92dc0 100644
--- a/sepal_ui/sepalwidgets/tile.py
+++ b/sepal_ui/sepalwidgets/tile.py
@@ -76,7 +76,7 @@ class Tile(v.Layout, SepalWidget):
self._metadata["mount_id"] = "nested_tile"
# remove elevation
- self.elevation = False
+ self.children[0].elevation = False
# remove title
self.set_title()
|
12rambau__sepal_ui-518
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/mapping/aoi_control.py:AoiControl.__init__"
],
"edited_modules": [
"sepal_ui/mapping/aoi_control.py:AoiControl"
]
},
"file": "sepal_ui/mapping/aoi_control.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/mapping/fullscreen_control.py:FullScreenControl.__init__",
"sepal_ui/mapping/fullscreen_control.py:FullScreenControl.toggle_fullscreen"
],
"edited_modules": [
"sepal_ui/mapping/fullscreen_control.py:FullScreenControl"
]
},
"file": "sepal_ui/mapping/fullscreen_control.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/mapping/map_btn.py:MapBtn.__init__"
],
"edited_modules": [
"sepal_ui/mapping/map_btn.py:MapBtn"
]
},
"file": "sepal_ui/mapping/map_btn.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/mapping/value_inspector.py:ValueInspector.__init__"
],
"edited_modules": [
"sepal_ui/mapping/value_inspector.py:ValueInspector"
]
},
"file": "sepal_ui/mapping/value_inspector.py"
}
] |
12rambau/sepal_ui
|
698d446e33062934d49f9edb91cbe303b73e786f
|
add posibility to add text in the map_btn
The current implementation of the map_btn only authorize to use logos. It would be nice to let the opportunity to use letters as in the SEPAL main framework (3 letters only in capital)
|
diff --git a/sepal_ui/mapping/aoi_control.py b/sepal_ui/mapping/aoi_control.py
index 01a6aa48..ae143d2c 100644
--- a/sepal_ui/mapping/aoi_control.py
+++ b/sepal_ui/mapping/aoi_control.py
@@ -36,7 +36,7 @@ class AoiControl(WidgetControl):
kwargs["position"] = kwargs.pop("position", "topright")
# create a hoverable btn
- btn = MapBtn(logo="fas fa-search-location", v_on="menu.on")
+ btn = MapBtn(content="fas fa-search-location", v_on="menu.on")
slot = {"name": "activator", "variable": "menu", "children": btn}
self.aoi_list = sw.ListItemGroup(children=[], v_model="")
w_list = sw.List(
diff --git a/sepal_ui/mapping/fullscreen_control.py b/sepal_ui/mapping/fullscreen_control.py
index 5e23c1d6..2855fa72 100644
--- a/sepal_ui/mapping/fullscreen_control.py
+++ b/sepal_ui/mapping/fullscreen_control.py
@@ -43,7 +43,7 @@ class FullScreenControl(WidgetControl):
self.zoomed = fullscreen
# create a btn
- self.w_btn = MapBtn(logo=self.ICONS[self.zoomed])
+ self.w_btn = MapBtn(self.ICONS[self.zoomed])
# overwrite the widget set in the kwargs (if any)
kwargs["widget"] = self.w_btn
@@ -88,7 +88,7 @@ class FullScreenControl(WidgetControl):
self.zoomed = not self.zoomed
# change button icon
- self.w_btn.logo.children = [self.ICONS[self.zoomed]]
+ self.w_btn.children[0].children = [self.ICONS[self.zoomed]]
# zoom
self.template.send({"method": self.METHODS[self.zoomed], "args": []})
diff --git a/sepal_ui/mapping/map_btn.py b/sepal_ui/mapping/map_btn.py
index ab55e1c8..0ea13364 100644
--- a/sepal_ui/mapping/map_btn.py
+++ b/sepal_ui/mapping/map_btn.py
@@ -7,26 +7,26 @@ from sepal_ui.frontend.styles import map_btn_style
class MapBtn(v.Btn, sw.SepalWidget):
"""
Btn specifically design to be displayed on a map. It matches all the characteristics of
- the classic leaflet btn but as they are from ipyvuetify we can use them in combination with Menu to produce on-the-map. The MapBtn is responsive to theme changes.
- Tiles. It only accept icon as children as the space is very limited.
+ the classic leaflet btn but as they are from ipyvuetify we can use them in combination with Menu to produce on-the-map tiles.
+ The MapBtn is responsive to theme changes. It only accept icon or 3 letters as children as the space is very limited.
Args:
- logo (str): a fas/mdi fully qualified name
+ content (str): a fas/mdi fully qualified name or a string name. If a string name is used, only the 3 first letters will be displayed.
"""
- logo = None
- "(sw.Icon): a sw.Icon"
-
- def __init__(self, logo, **kwargs):
+ def __init__(self, content, **kwargs):
# create the icon
- self.logo = sw.Icon(small=True, children=[logo])
+ if content.startswith("mdi-") or content.startswith("fas fa-"):
+ content = sw.Icon(small=True, children=[content])
+ else:
+ content = content[: min(3, len(content))].upper()
# some parameters are overloaded to match the map requirements
kwargs["color"] = "text-color"
kwargs["outlined"] = True
kwargs["style_"] = " ".join([f"{k}: {v};" for k, v in map_btn_style.items()])
- kwargs["children"] = [self.logo]
+ kwargs["children"] = [content]
kwargs["icon"] = False
super().__init__(**kwargs)
diff --git a/sepal_ui/mapping/value_inspector.py b/sepal_ui/mapping/value_inspector.py
index ecc52e72..96508ba3 100644
--- a/sepal_ui/mapping/value_inspector.py
+++ b/sepal_ui/mapping/value_inspector.py
@@ -54,7 +54,7 @@ class ValueInspector(WidgetControl):
)
# create a clickable btn
- btn = MapBtn(logo="fas fa-crosshairs", v_on="menu.on")
+ btn = MapBtn("fas fa-crosshairs", v_on="menu.on")
slot = {"name": "activator", "variable": "menu", "children": btn}
close_btn = sw.Icon(children=["fas fa-times"], small=True)
title = sw.Html(tag="h4", children=[ms.v_inspector.title])
|
12rambau__sepal_ui-535
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/mapping/sepal_map.py:SepalMap.zoom_bounds"
],
"edited_modules": [
"sepal_ui/mapping/sepal_map.py:SepalMap"
]
},
"file": "sepal_ui/mapping/sepal_map.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/reclassify/table_view.py:EditDialog.__init__"
],
"edited_modules": [
"sepal_ui/reclassify/table_view.py:EditDialog"
]
},
"file": "sepal_ui/reclassify/table_view.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/alert.py:Banner.set_btn"
],
"edited_modules": [
"sepal_ui/sepalwidgets/alert.py:Banner"
]
},
"file": "sepal_ui/sepalwidgets/alert.py"
},
{
"changes": {
"added_entities": [
"sepal_ui/translator/translator.py:Translator.key_use"
],
"added_modules": null,
"edited_entities": null,
"edited_modules": [
"sepal_ui/translator/translator.py:Translator"
]
},
"file": "sepal_ui/translator/translator.py"
}
] |
12rambau/sepal_ui
|
6a619361e90ab318463e2094fc9dbcbc85dd2e8f
|
create a translator function to check the use of the keys
If you are updating many time the same application you may end up removing some or all the existing keys. It complex to visually assess if all the remaining keys in the dict are used.
Maybe a parser could be interesting to check all the folder files and validate the keys that are used.
Usage will be of course for developer only
|
diff --git a/sepal_ui/mapping/sepal_map.py b/sepal_ui/mapping/sepal_map.py
index 57693e56..e2860daf 100644
--- a/sepal_ui/mapping/sepal_map.py
+++ b/sepal_ui/mapping/sepal_map.py
@@ -227,8 +227,8 @@ class SepalMap(ipl.Map):
# Center map to the centroid of the layer(s)
self.center = [(maxy - miny) / 2 + miny, (maxx - minx) / 2 + minx]
- # create the tuples for each corner
- tl, br, bl, tr = (minx, maxy), (maxx, miny), (minx, miny), (maxx, maxy)
+ # create the tuples for each corner in (lat/lng) convention
+ tl, br, bl, tr = (maxy, minx), (miny, maxx), (miny, minx), (maxy, maxx)
# find zoom level to display the biggest diagonal (in km)
lg, zoom = 40075, 1 # number of displayed km at zoom 1
diff --git a/sepal_ui/message/en/locale.json b/sepal_ui/message/en/locale.json
index 5989dee5..b689db70 100644
--- a/sepal_ui/message/en/locale.json
+++ b/sepal_ui/message/en/locale.json
@@ -37,7 +37,6 @@
"custom": "Custom",
"no_access": "It seems like you do not have access to the input asset or it does not exist.",
"wrong_type": "The type of the selected asset ({}) does not match authorized asset type ({}).",
- "hint": "Select an asset in the list or write a custom asset name. Be careful, you need to have access to this asset to use it",
"placeholder": "users/custom_user/custom_asset"
},
"load_table": {
@@ -65,20 +64,6 @@
"asset": "GEE Asset name",
"btn": "Select AOI",
"complete": "The AOI has been selected",
- "shape_drawn": "A shape has been drawn",
- "file_pattern": "aoi_{}",
- "no_selection": "No selection method has been picked up",
- "no_country": "No Country has been selected",
- "asset_already_exist": "The asset was already existing you can continue to use it. It's also available at :{}",
- "asset_created": "The asset has been created under the name : {}",
- "name_used": "The name was already in used, change it or delete the previous asset in your GEE acount",
- "no_asset": "No Asset has been provided",
- "check_if_asset": "Check carefully that your string is an assetId",
- "not_available": "This function is not yet available",
- "no_shape": "No shape has been drawn on the map",
- "shp_error": "An error occured with provided .shp file",
- "aoi_message": "click on \"selet these inputs\" to validate your AOI",
- "geojson_to_ee": "Convert your .csv file into a ee_object",
"exception" : {
"no_inputs": "Please provide fully qualified inputs before validating your AOI",
"no_asset" : "Please select an asset.",
@@ -98,7 +83,6 @@
"planet" : {
"exception" : {
"empty": "Please fill the required field(s).",
- "format" : "Please check the format of your inputs.",
"invalid" : "Invalid email or password",
"nosubs" : "Your credentials do not have any valid planet subscription."
},
@@ -143,7 +127,7 @@
"0": "New element",
"1": "Modify element"
},
- "btn": {
+ "btn": {
"save": {
"name": "save",
"tooltip": "create new class"
diff --git a/sepal_ui/reclassify/table_view.py b/sepal_ui/reclassify/table_view.py
index 0f8bf1cd..c3f8a35a 100644
--- a/sepal_ui/reclassify/table_view.py
+++ b/sepal_ui/reclassify/table_view.py
@@ -212,15 +212,24 @@ class EditDialog(v.Dialog):
self.title = v.CardTitle(children=[self.TITLES[0]])
# Action buttons
- btn_txt = ms.rec.table.edit_dialog.btn
- self.save = sw.Btn(btn_txt.save.name)
- save_tool = sw.Tooltip(self.save, btn_txt.save.tooltip, bottom=True)
+ self.save = sw.Btn(ms.rec.table.edit_dialog.btn.save.name)
+ save_tool = sw.Tooltip(
+ self.save, ms.rec.table.edit_dialog.btn.save.tooltip, bottom=True
+ )
- self.modify = sw.Btn(btn_txt.modify.name).hide() # by default modify is hidden
- modify_tool = sw.Tooltip(self.modify, btn_txt.modify.tooltip, bottom=True)
+ self.modify = sw.Btn(
+ ms.rec.table.edit_dialog.btn.modify.name
+ ).hide() # by default modify is hidden
+ modify_tool = sw.Tooltip(
+ self.modify, ms.rec.table.edit_dialog.btn.modify.tooltip, bottom=True
+ )
- self.cancel = sw.Btn(btn_txt.cancel.name, outlined=True, class_="ml-2")
- cancel_tool = sw.Tooltip(self.cancel, btn_txt.cancel.tooltip, bottom=True)
+ self.cancel = sw.Btn(
+ ms.rec.table.edit_dialog.btn.cancel.name, outlined=True, class_="ml-2"
+ )
+ cancel_tool = sw.Tooltip(
+ self.cancel, ms.rec.table.edit_dialog.btn.cancel.tooltip, bottom=True
+ )
actions = v.CardActions(children=[save_tool, modify_tool, cancel_tool])
diff --git a/sepal_ui/sepalwidgets/alert.py b/sepal_ui/sepalwidgets/alert.py
index 6d869aaa..e143170e 100644
--- a/sepal_ui/sepalwidgets/alert.py
+++ b/sepal_ui/sepalwidgets/alert.py
@@ -380,8 +380,11 @@ class Banner(v.Snackbar, SepalWidget):
Args:
nb_banner (int): the number of banners in the queue
"""
- msg = ms.widgets.banner
- txt = msg.close if nb_banner == 0 else msg.next.format(nb_banner)
+ # do not wrap ms.widget.banner. If you do it won't be recognized by the key-checker of the Translator
+ if nb_banner == 0:
+ txt = ms.widgets.banner.close
+ else:
+ txt = ms.widgets.banner.next.format(nb_banner)
self.btn_close.children = [txt]
return
diff --git a/sepal_ui/translator/translator.py b/sepal_ui/translator/translator.py
index 5cf26320..f3a4b791 100644
--- a/sepal_ui/translator/translator.py
+++ b/sepal_ui/translator/translator.py
@@ -3,6 +3,7 @@ from collections import abc
from configparser import ConfigParser
from pathlib import Path
+import pandas as pd
from box import Box
from deprecated.sphinx import deprecated, versionadded
@@ -262,3 +263,58 @@ class Translator(Box):
del d[k]
return d
+
+ @versionadded(version="2.10.0")
+ def key_use(self, folder, name):
+ """
+ Parse all the files in the folder and check if keys are all used at least once.
+ Return the unused key names.
+
+ .. warning::
+
+ Don't forget that there are many ways of calling Translator variables
+ (getattr, save.cm.xxx in another variable etc...) SO don't forget to check
+ manually the variables suggested by this method before deleting them
+
+ Args:
+ folder (pathlib.Path): The application folder using this translator data
+ name (str): the name use by the translator in this app (usually "cm")
+
+ Return:
+ (list): the list of unused keys
+ """
+ # cannot set FORBIDDEN_KEY in the Box as it would lock another key
+ FORBIDDEN_KEYS = ["_folder", "_default", "_target", "_targeted", "_match"]
+
+ # sanitize folder
+ folder = Path(folder)
+
+ # get all the python files recursively
+ py_files = [
+ f for f in folder.glob("**/*.py") if ".ipynb_checkpoints" not in str(f)
+ ]
+
+ # get the flat version of all keys
+ keys = list(set(pd.json_normalize(self).columns) ^ set(FORBIDDEN_KEYS))
+
+ # init the unused keys list
+ unused_keys = []
+
+ for k in keys:
+
+ # by default we consider that the is never used
+ is_present = False
+
+ # read each python file and search for the pattern of the key
+ # if it's find change status of the counter and exit the search
+ for f in py_files:
+ tmp = f.read_text()
+ if f"{name}.{k}" in tmp:
+ is_present = True
+ break
+
+ # if nothing is find, the value is still False and the key can be
+ # added to the list
+ is_present or unused_keys.append(k)
+
+ return unused_keys
|
12rambau__sepal_ui-574
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/translator/translator.py:Translator.__init__",
"sepal_ui/translator/translator.py:Translator.search_key"
],
"edited_modules": [
"sepal_ui/translator/translator.py:Translator"
]
},
"file": "sepal_ui/translator/translator.py"
}
] |
12rambau/sepal_ui
|
412e02ef08df68c256f384081d2c7eaecc09428e
|
_protected_keys are not raising error when used in translator
`protected_keys` are not raising errors when used in a json translation file. It is also happening with the "`FORBIDDEN_KEYS`" when are used in nested levels.
To reproduce...
```Python
# set up the appropriate keys for each language
keys = {
"en": {
"find_target": "A key",
"test_key": "Test key",
"nested" : {
"items" : {
"_target" : "value"
},
},
"merge_dict" : "value"
},
"fr": {
"a_key": "Une clef",
"test_key": "Clef de test"
},
"fr-FR": {
"a_key": "Une clef",
"test_key": "Clef de test"
},
"es": {
"a_key": "Una llave"
},
}
# generate the tmp_dir in the test directory
tmp_dir = Path(".").parent / "data" / "messages"
tmp_dir.mkdir(exist_ok=True, parents=True)
# create the translation files
for lan, d in keys.items():
folder = tmp_dir / lan
folder.mkdir(exist_ok=True)
(folder / "locale.json").write_text(json.dumps(d, indent=2))
```
When the object is being instantiated, there's not any error to alert that the nested key "`_target`" cannot be used, nor the "`find_target`" in the first level.
```Python
translator = Translator(tmp_dir, "en")
```
|
diff --git a/sepal_ui/translator/translator.py b/sepal_ui/translator/translator.py
index 1ad14c98..ea647223 100644
--- a/sepal_ui/translator/translator.py
+++ b/sepal_ui/translator/translator.py
@@ -65,7 +65,7 @@ class Translator(Box):
# check if forbidden keys are being used
# this will raise an error if any
- [self.search_key(ms_dict, k) for k in FORBIDDEN_KEYS]
+ [self.search_key(ms_dict, k) for k in FORBIDDEN_KEYS + self._protected_keys]
# # unpack the json as a simple namespace
ms_json = json.dumps(ms_dict)
@@ -130,8 +130,7 @@ class Translator(Box):
return (target, lang)
- @staticmethod
- def search_key(d, key):
+ def search_key(self, d, key):
"""
Search a specific key in the d dictionary and raise an error if found
@@ -144,7 +143,9 @@ class Translator(Box):
msg = f"You cannot use the key {key} in your translation dictionary"
raise Exception(msg)
- return
+ for k, v in d.items():
+ if isinstance(v, dict):
+ return self.search_key(v, key)
@classmethod
def sanitize(cls, d):
|
12rambau__sepal_ui-601
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/inputs.py:DatePicker.__init__",
"sepal_ui/sepalwidgets/inputs.py:DatePicker.check_date"
],
"edited_modules": [
"sepal_ui/sepalwidgets/inputs.py:DatePicker"
]
},
"file": "sepal_ui/sepalwidgets/inputs.py"
}
] |
12rambau/sepal_ui
|
89f8d87dc4f83bfc2e96a111692ae252e470e8bc
|
Datepicker is not fully customizable
As our main `DatePicker` usage is as in its "menu" form, it is not handy to set some use cases:
- set a min_, max_ value directly (you have to `datepicker.children.....min_`...)
- set a default initial value with `v_model` since it is hardcoded from the beginning
- the `jslink` "link" will only work if the change is made from a "js" event, but not if you want to link the values since the initialization.
|
diff --git a/sepal_ui/sepalwidgets/inputs.py b/sepal_ui/sepalwidgets/inputs.py
index 95fda88a..6293f828 100644
--- a/sepal_ui/sepalwidgets/inputs.py
+++ b/sepal_ui/sepalwidgets/inputs.py
@@ -6,6 +6,7 @@ import ee
import geopandas as gpd
import ipyvuetify as v
import pandas as pd
+from deprecated.sphinx import versionadded
from ipywidgets import jslink
from natsort import humansorted
from traitlets import Any, Bool, Dict, Int, List, Unicode, link, observe
@@ -29,13 +30,18 @@ __all__ = [
]
+@versionadded(
+ version="2.13.0",
+ reason="Empty v_model will be treated as empty string: :code:`v_model=''`.",
+)
class DatePicker(v.Layout, SepalWidget):
"""
Custom input widget to provide a reusable DatePicker. It allows to choose date as a string in the following format YYYY-MM-DD
Args:
label (str, optional): the label of the datepicker field
- kwargs (optional): any parameter from a v.Layout abject. If set, 'children' will be overwritten.
+ layout_kwargs (dict, optional): any parameter for the wrapper layout
+ kwargs (optional): any parameter from a v.DatePicker abject.
"""
@@ -48,13 +54,14 @@ class DatePicker(v.Layout, SepalWidget):
disabled = Bool(False).tag(sync=True)
"traitlets.Bool: the disabled status of the Datepicker object"
- def __init__(self, label="Date", **kwargs):
+ def __init__(self, label="Date", layout_kwargs={}, **kwargs):
+
+ kwargs["v_model"] = kwargs.get("v_model", "")
# create the widgets
- date_picker = v.DatePicker(no_title=True, v_model=None, scrollable=True)
+ self.date_picker = v.DatePicker(no_title=True, scrollable=True, **kwargs)
self.date_text = v.TextField(
- v_model=None,
label=label,
hint="YYYY-MM-DD format",
persistent_hint=True,
@@ -69,7 +76,7 @@ class DatePicker(v.Layout, SepalWidget):
offset_y=True,
v_model=False,
close_on_content_click=False,
- children=[date_picker],
+ children=[self.date_picker],
v_slots=[
{
"name": "activator",
@@ -80,17 +87,18 @@ class DatePicker(v.Layout, SepalWidget):
)
# set the default parameter
- kwargs["v_model"] = kwargs.pop("v_model", None)
- kwargs["row"] = kwargs.pop("row", True)
- kwargs["class_"] = kwargs.pop("class_", "pa-5")
- kwargs["align_center"] = kwargs.pop("align_center", True)
- kwargs["children"] = [v.Flex(xs10=True, children=[self.menu])]
+ layout_kwargs["row"] = layout_kwargs.get("row", True)
+ layout_kwargs["class_"] = layout_kwargs.get("class_", "pa-5")
+ layout_kwargs["align_center"] = layout_kwargs.get("align_center", True)
+ layout_kwargs["children"] = layout_kwargs.pop(
+ "children", [v.Flex(xs10=True, children=[self.menu])]
+ )
# call the constructor
- super().__init__(**kwargs)
+ super().__init__(**layout_kwargs)
- jslink((date_picker, "v_model"), (self.date_text, "v_model"))
- jslink((self, "v_model"), (date_picker, "v_model"))
+ link((self.date_picker, "v_model"), (self.date_text, "v_model"))
+ link((self.date_picker, "v_model"), (self, "v_model"))
@observe("v_model")
def check_date(self, change):
@@ -102,7 +110,7 @@ class DatePicker(v.Layout, SepalWidget):
self.date_text.error_messages = None
# exit immediately if nothing is set
- if change["new"] is None:
+ if not change["new"]:
return
# change the error status
|
12rambau__sepal_ui-608
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/reclassify/reclassify_view.py:ImportMatrixDialog.__init__",
"sepal_ui/reclassify/reclassify_view.py:SaveMatrixDialog.__init__",
"sepal_ui/reclassify/reclassify_view.py:ReclassifyView.__init__"
],
"edited_modules": [
"sepal_ui/reclassify/reclassify_view.py:ImportMatrixDialog",
"sepal_ui/reclassify/reclassify_view.py:SaveMatrixDialog",
"sepal_ui/reclassify/reclassify_view.py:ReclassifyView"
]
},
"file": "sepal_ui/reclassify/reclassify_view.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/reclassify/table_view.py:ClassTable.__init__",
"sepal_ui/reclassify/table_view.py:EditDialog.__init__",
"sepal_ui/reclassify/table_view.py:SaveDialog.__init__",
"sepal_ui/reclassify/table_view.py:TableView.__init__"
],
"edited_modules": [
"sepal_ui/reclassify/table_view.py:ClassTable",
"sepal_ui/reclassify/table_view.py:EditDialog",
"sepal_ui/reclassify/table_view.py:SaveDialog",
"sepal_ui/reclassify/table_view.py:TableView"
]
},
"file": "sepal_ui/reclassify/table_view.py"
},
{
"changes": {
"added_entities": [
"sepal_ui/sepalwidgets/btn.py:Btn._set_gliph",
"sepal_ui/sepalwidgets/btn.py:Btn._set_text"
],
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/btn.py:Btn.__init__",
"sepal_ui/sepalwidgets/btn.py:Btn.set_icon"
],
"edited_modules": [
"sepal_ui/sepalwidgets/btn.py:Btn"
]
},
"file": "sepal_ui/sepalwidgets/btn.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/inputs.py:FileInput.__init__"
],
"edited_modules": [
"sepal_ui/sepalwidgets/inputs.py:FileInput"
]
},
"file": "sepal_ui/sepalwidgets/inputs.py"
}
] |
12rambau/sepal_ui
|
2d5126f5e9521470cbeb5ad374f74046e889f771
|
create a function to set the text of the btn dynamically
icon and text should be editable dynamically
https://github.com/12rambau/sepal_ui/blob/8af255ec0d1cb3ad4dd74d021ad140fafef756f6/sepal_ui/sepalwidgets/btn.py#L38
|
diff --git a/docs/source/widgets/btn.rst b/docs/source/widgets/btn.rst
index 949d5468..91d92967 100644
--- a/docs/source/widgets/btn.rst
+++ b/docs/source/widgets/btn.rst
@@ -20,8 +20,8 @@ The default color is set to "primary".
v.theme.dark = False
btn = sw.Btn(
- text = "The One btn",
- icon = "fas fa-cogs"
+ msg = "The One btn",
+ gliph = "fas fa-cogs"
)
btn
@@ -42,8 +42,8 @@ Btn can be used to launch function on any Javascript event such as "click".
v.theme.dark = False
btn = sw.Btn(
- text = "The One btn",
- icon = "fas fa-cogs"
+ msg = "The One btn",
+ gliph = "fas fa-cogs"
)
btn.on_event('click', lambda *args: print('Hello world!'))
diff --git a/sepal_ui/reclassify/reclassify_view.py b/sepal_ui/reclassify/reclassify_view.py
index f4d6ca40..18a90455 100644
--- a/sepal_ui/reclassify/reclassify_view.py
+++ b/sepal_ui/reclassify/reclassify_view.py
@@ -33,8 +33,8 @@ class ImportMatrixDialog(v.Dialog):
# create the 3 widgets
title = v.CardTitle(children=["Load reclassification matrix"])
self.w_file = sw.FileInput(label="filename", folder=folder)
- self.load_btn = sw.Btn("Load")
- cancel = sw.Btn("Cancel", outlined=True)
+ self.load_btn = sw.Btn(msg="Load")
+ cancel = sw.Btn(msg="Cancel", outlined=True)
actions = v.CardActions(children=[cancel, self.load_btn])
# default params
@@ -81,8 +81,8 @@ class SaveMatrixDialog(v.Dialog):
# create the widgets
title = v.CardTitle(children=["Save matrix"])
self.w_file = v.TextField(label="filename", v_model=None)
- btn = sw.Btn("Save matrix")
- cancel = sw.Btn("Cancel", outlined=True)
+ btn = sw.Btn(msg="Save matrix")
+ cancel = sw.Btn(msg="Cancel", outlined=True)
actions = v.CardActions(children=[cancel, btn])
self.alert = sw.Alert(children=["Choose a name for the output"]).show()
@@ -464,7 +464,7 @@ class ReclassifyView(sw.Card):
self.btn_list = [
sw.Btn(
- "Custom",
+ msg="Custom",
_metadata={"path": "custom"},
small=True,
class_="mr-2",
@@ -472,7 +472,7 @@ class ReclassifyView(sw.Card):
)
] + [
sw.Btn(
- f"use {name}",
+ msg=f"use {name}",
_metadata={"path": path},
small=True,
class_="mr-2",
@@ -490,18 +490,20 @@ class ReclassifyView(sw.Card):
self.save_dialog = SaveMatrixDialog(folder=out_path)
self.import_dialog = ImportMatrixDialog(folder=out_path)
self.get_table = sw.Btn(
- ms.rec.rec.input.btn, "far fa-table", color="success", small=True
+ msg=ms.rec.rec.input.btn, gliph="far fa-table", color="success", small=True
)
self.import_table = sw.Btn(
- "import",
- "fas fa-download",
+ msg="import",
+ gliph="fas fa-download",
color="secondary",
small=True,
class_="ml-2 mr-2",
)
- self.save_table = sw.Btn("save", "fas fa-save", color="secondary", small=True)
+ self.save_table = sw.Btn(
+ msg="save", gliph="fas fa-save", color="secondary", small=True
+ )
self.reclassify_btn = sw.Btn(
- ms.rec.rec.btn, "fas fa-chess-board", small=True, disabled=True
+ msg=ms.rec.rec.btn, gliph="fas fa-chess-board", small=True, disabled=True
)
self.toolbar = v.Toolbar(
diff --git a/sepal_ui/reclassify/table_view.py b/sepal_ui/reclassify/table_view.py
index c3f8a35a..24ac31b5 100644
--- a/sepal_ui/reclassify/table_view.py
+++ b/sepal_ui/reclassify/table_view.py
@@ -49,19 +49,24 @@ class ClassTable(sw.DataTable):
# create the 4 CRUD btn
# and set them in the top slot of the table
self.edit_btn = sw.Btn(
- ms.rec.table.btn.edit,
- icon="fas fa-pencil-alt",
+ msg=ms.rec.table.btn.edit,
+ gliph="fas fa-pencil-alt",
class_="ml-2 mr-2",
color="secondary",
small=True,
)
self.delete_btn = sw.Btn(
- ms.rec.table.btn.delete, icon="fas fa-trash-alt", color="error", small=True
+ msg=ms.rec.table.btn.delete,
+ gliph="fas fa-trash-alt",
+ color="error",
+ small=True,
)
self.add_btn = sw.Btn(
- ms.rec.table.btn.add, icon="fas fa-plus", color="success", small=True
+ msg=ms.rec.table.btn.add, gliph="fas fa-plus", color="success", small=True
+ )
+ self.save_btn = sw.Btn(
+ msg=ms.rec.table.btn.save, gliph="far fa-save", small=True
)
- self.save_btn = sw.Btn(ms.rec.table.btn.save, icon="far fa-save", small=True)
slot = v.Toolbar(
class_="d-flex mb-6",
@@ -212,20 +217,19 @@ class EditDialog(v.Dialog):
self.title = v.CardTitle(children=[self.TITLES[0]])
# Action buttons
- self.save = sw.Btn(ms.rec.table.edit_dialog.btn.save.name)
+ self.save = sw.Btn(msg=ms.rec.table.edit_dialog.btn.save.name)
save_tool = sw.Tooltip(
self.save, ms.rec.table.edit_dialog.btn.save.tooltip, bottom=True
)
- self.modify = sw.Btn(
- ms.rec.table.edit_dialog.btn.modify.name
- ).hide() # by default modify is hidden
+ self.modify = sw.Btn(msg=ms.rec.table.edit_dialog.btn.modify.name)
+ self.modify.hide() # by default modify is hidden
modify_tool = sw.Tooltip(
self.modify, ms.rec.table.edit_dialog.btn.modify.tooltip, bottom=True
)
self.cancel = sw.Btn(
- ms.rec.table.edit_dialog.btn.cancel.name, outlined=True, class_="ml-2"
+ msg=ms.rec.table.edit_dialog.btn.cancel.name, outlined=True, class_="ml-2"
)
cancel_tool = sw.Tooltip(
self.cancel, ms.rec.table.edit_dialog.btn.cancel.tooltip, bottom=True
@@ -437,7 +441,7 @@ class SaveDialog(v.Dialog):
v_model=ms.rec.table.save_dialog.placeholder,
)
- self.save = sw.Btn(ms.rec.table.save_dialog.btn.save.name)
+ self.save = sw.Btn(msg=ms.rec.table.save_dialog.btn.save.name)
save = sw.Tooltip(
self.save,
ms.rec.table.save_dialog.btn.save.tooltip,
@@ -446,7 +450,7 @@ class SaveDialog(v.Dialog):
)
self.cancel = sw.Btn(
- ms.rec.table.save_dialog.btn.cancel.name, outlined=True, class_="ml-2"
+ msg=ms.rec.table.save_dialog.btn.cancel.name, outlined=True, class_="ml-2"
)
cancel = sw.Tooltip(
self.cancel, ms.rec.table.save_dialog.btn.cancel.tooltip, bottom=True
@@ -600,8 +604,8 @@ class TableView(sw.Card):
folder=self.class_path,
)
self.btn = sw.Btn(
- ms.rec.table.classif.btn,
- icon="far fa-table",
+ msg=ms.rec.table.classif.btn,
+ gliph="far fa-table",
color="success",
outlined=True,
)
diff --git a/sepal_ui/sepalwidgets/btn.py b/sepal_ui/sepalwidgets/btn.py
index c6437d86..137622fa 100644
--- a/sepal_ui/sepalwidgets/btn.py
+++ b/sepal_ui/sepalwidgets/btn.py
@@ -1,6 +1,9 @@
+import warnings
from pathlib import Path
import ipyvuetify as v
+from deprecated.sphinx import deprecated
+from traitlets import Unicode, observe
from sepal_ui.scripts import utils as su
from sepal_ui.sepalwidgets.sepalwidget import SepalWidget
@@ -14,27 +17,83 @@ class Btn(v.Btn, SepalWidget):
the color will be defaulted to 'primary' and can be changed afterward according to your need
Args:
+ msg (str, optional): the text to display in the btn
+ gliph (str, optional): the full name of any mdi/fa icon
text (str, optional): the text to display in the btn
icon (str, optional): the full name of any mdi/fa icon
kwargs (dict, optional): any parameters from v.Btn. if set, 'children' will be overwritten.
+
+ .. deprecated:: 2.13
+ ``text`` and ``icon`` will be replaced by ``msg`` and ``gliph`` to avoid duplicating ipyvuetify trait.
"""
v_icon = None
"v.Icon: the icon in the btn"
- def __init__(self, text="Click", icon="", **kwargs):
+ gliph = Unicode("").tag(sync=True)
+ "traitlet.Unicode: the name of the icon"
+
+ msg = Unicode("").tag(sync=True)
+ "traitlet.Unicode: the text of the btn"
+
+ def __init__(self, msg="Click", gliph="", **kwargs):
+
+ # deprecation in 2.13 of text and icon
+ # as they already exist in the ipyvuetify Btn traits (as booleans)
+ if "text" in kwargs:
+ if isinstance(kwargs["text"], str):
+ msg = kwargs.pop("text")
+ warnings.warn(
+ '"text" is deprecated, please use "msg" instead', DeprecationWarning
+ )
+ if "icon" in kwargs:
+ if isinstance(kwargs["icon"], str):
+ gliph = kwargs.pop("icon")
+ warnings.warn(
+ '"icon" is deprecated, please use "gliph" instead',
+ DeprecationWarning,
+ )
# create the default v_icon
self.v_icon = v.Icon(left=True, children=[""])
- self.set_icon(icon)
# set the default parameters
kwargs["color"] = kwargs.pop("color", "primary")
- kwargs["children"] = [self.v_icon, text]
+ kwargs["children"] = [self.v_icon, self.msg]
# call the constructor
super().__init__(**kwargs)
+ self.gliph = gliph
+ self.msg = msg
+
+ @observe("gliph")
+ def _set_gliph(self, change):
+ """
+ Set a new icon. If the icon is set to "", then it's hidden
+ """
+ new_gliph = change["new"]
+ self.v_icon.children = [new_gliph]
+
+ # hide the component to avoid the right padding
+ if not new_gliph:
+ su.hide_component(self.v_icon)
+ else:
+ su.show_component(self.v_icon)
+
+ return self
+
+ @observe("msg")
+ def _set_text(self, change):
+ """
+ Set the text of the btn
+ """
+
+ self.children = [self.v_icon, change["new"]]
+
+ return self
+
+ @deprecated(version="2.14", reason="Replace by the private _set_gliph")
def set_icon(self, icon=""):
"""
set a new icon. If the icon is set to "", then it's hidden.
@@ -45,13 +104,7 @@ class Btn(v.Btn, SepalWidget):
Return:
self
"""
- self.v_icon.children = [icon]
-
- if not icon:
- su.hide_component(self.v_icon)
- else:
- su.show_component(self.v_icon)
-
+ self.gliph = icon
return self
def toggle_loading(self):
diff --git a/sepal_ui/sepalwidgets/inputs.py b/sepal_ui/sepalwidgets/inputs.py
index 5a9507ad..1bb0e850 100644
--- a/sepal_ui/sepalwidgets/inputs.py
+++ b/sepal_ui/sepalwidgets/inputs.py
@@ -256,7 +256,7 @@ class FileInput(v.Flex, SepalWidget):
"name": "activator",
"variable": "x",
"children": Btn(
- icon="fas fa-search", v_model=False, v_on="x.on", text=label
+ gliph="fas fa-search", v_model=False, v_on="x.on", msg=label
),
}
],
|
12rambau__sepal_ui-644
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/btn.py:Btn.__init__",
"sepal_ui/sepalwidgets/btn.py:Btn._set_text"
],
"edited_modules": [
"sepal_ui/sepalwidgets/btn.py:Btn"
]
},
"file": "sepal_ui/sepalwidgets/btn.py"
}
] |
12rambau/sepal_ui
|
8a8196e3c7893b7a0aebdb4910e83054f59e0374
|
sepal_ui.Btn does't work as expected
I want to create a simple Icon button, to do so:
```python
sw.Btn(icon=True, gliph ="mdi-plus")
```
Doing this, without "msg" parameter will add the default text to the button which is "click", I think is worthless having that value.
So if I want to remove the default text, I would expect doing this:
```python
sw.Btn(children = [""], icon=True, gliph ="mdi-plus")
# or
sw.Btn(msg= ""] icon=True, gliph ="mdi-plus")
```
Which leads the icon aligned to the left and not centered (as it is using a empyt string as message).
|
diff --git a/sepal_ui/sepalwidgets/btn.py b/sepal_ui/sepalwidgets/btn.py
index 137622fa..105f6160 100644
--- a/sepal_ui/sepalwidgets/btn.py
+++ b/sepal_ui/sepalwidgets/btn.py
@@ -25,6 +25,9 @@ class Btn(v.Btn, SepalWidget):
.. deprecated:: 2.13
``text`` and ``icon`` will be replaced by ``msg`` and ``gliph`` to avoid duplicating ipyvuetify trait.
+
+ .. deprecated:: 2.14
+ Btn is not using a default ``msg`` anymor`.
"""
v_icon = None
@@ -36,7 +39,7 @@ class Btn(v.Btn, SepalWidget):
msg = Unicode("").tag(sync=True)
"traitlet.Unicode: the text of the btn"
- def __init__(self, msg="Click", gliph="", **kwargs):
+ def __init__(self, msg="", gliph="", **kwargs):
# deprecation in 2.13 of text and icon
# as they already exist in the ipyvuetify Btn traits (as booleans)
@@ -55,7 +58,7 @@ class Btn(v.Btn, SepalWidget):
)
# create the default v_icon
- self.v_icon = v.Icon(left=True, children=[""])
+ self.v_icon = v.Icon(children=[""])
# set the default parameters
kwargs["color"] = kwargs.pop("color", "primary")
@@ -89,6 +92,7 @@ class Btn(v.Btn, SepalWidget):
Set the text of the btn
"""
+ self.v_icon.left = bool(change["new"])
self.children = [self.v_icon, change["new"]]
return self
|
12rambau__sepal_ui-646
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/alert.py:Alert.update_progress"
],
"edited_modules": [
"sepal_ui/sepalwidgets/alert.py:Alert"
]
},
"file": "sepal_ui/sepalwidgets/alert.py"
}
] |
12rambau/sepal_ui
|
8a8196e3c7893b7a0aebdb4910e83054f59e0374
|
allow other values for progress
Now that we are supporting tqdm it should be possible to support progress values that are not between 0 and 1. https://github.com/12rambau/sepal_ui/blob/c15a83dc6c92d076e6932afab4e4b2987585894b/sepal_ui/sepalwidgets/alert.py#L98
|
diff --git a/sepal_ui/sepalwidgets/alert.py b/sepal_ui/sepalwidgets/alert.py
index 68e3f115..de6d4abb 100644
--- a/sepal_ui/sepalwidgets/alert.py
+++ b/sepal_ui/sepalwidgets/alert.py
@@ -94,9 +94,10 @@ class Alert(v.Alert, SepalWidget):
self.show()
# cast the progress to float
+ total = tqdm_args.get("total", 1)
progress = float(progress)
- if not (0 <= progress <= 1):
- raise ValueError(f"progress should be in [0, 1], {progress} given")
+ if not (0 <= progress <= total):
+ raise ValueError(f"progress should be in [0, {total}], {progress} given")
# Prevent adding multiple times
if self.progress_output not in self.children:
@@ -107,7 +108,7 @@ class Alert(v.Alert, SepalWidget):
"bar_format", "{l_bar}{bar}{n_fmt}/{total_fmt}"
)
tqdm_args["dynamic_ncols"] = tqdm_args.pop("dynamic_ncols", tqdm_args)
- tqdm_args["total"] = tqdm_args.pop("total", 100)
+ tqdm_args["total"] = tqdm_args.pop("total", 1)
tqdm_args["desc"] = tqdm_args.pop("desc", msg)
tqdm_args["colour"] = tqdm_args.pop("tqdm_args", getattr(color, self.type))
@@ -120,7 +121,7 @@ class Alert(v.Alert, SepalWidget):
# Initialize bar
self.progress_bar.update(0)
- self.progress_bar.update(progress * 100 - self.progress_bar.n)
+ self.progress_bar.update(progress - self.progress_bar.n)
if progress == 1:
self.progress_bar.close()
|
12rambau__sepal_ui-747
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/sepalwidget.py:SepalWidget.get_children"
],
"edited_modules": [
"sepal_ui/sepalwidgets/sepalwidget.py:SepalWidget"
]
},
"file": "sepal_ui/sepalwidgets/sepalwidget.py"
}
] |
12rambau/sepal_ui
|
a683a7665a9710acd5ca939308e18539e92014b7
|
make get_children recursively again
previous implementation used recursion to find all children within the widget that matches with the query, now it returns only first level of matching children, could we make it reclusively again?
|
diff --git a/sepal_ui/sepalwidgets/sepalwidget.py b/sepal_ui/sepalwidgets/sepalwidget.py
index 40826809..00cbe015 100644
--- a/sepal_ui/sepalwidgets/sepalwidget.py
+++ b/sepal_ui/sepalwidgets/sepalwidget.py
@@ -177,11 +177,11 @@ class SepalWidget(v.VuetifyWidget):
is_klass = isinstance(w, klass)
is_val = w.attributes.get(attr, "niet") == value if attr and value else True
- # asumption: searched element won't be nested inside one another
if is_klass and is_val:
elements.append(w)
- else:
- elements = self.get_children(w, klass, attr, value, id_, elements)
+
+ # always search for nested elements
+ elements = self.get_children(w, klass, attr, value, id_, elements)
return elements
|
12rambau__sepal_ui-758
|
[
{
"changes": {
"added_entities": [
"sepal_ui/sepalwidgets/inputs.py:DatePicker.today"
],
"added_modules": null,
"edited_entities": null,
"edited_modules": [
"sepal_ui/sepalwidgets/inputs.py:DatePicker"
]
},
"file": "sepal_ui/sepalwidgets/inputs.py"
}
] |
12rambau/sepal_ui
|
27a18eba37bec8ef1cabfa6bcc4022164ebc4c3b
|
add a today() method for the datepicker
It's something I do a lot, setting up the datepicker to today as:
```python
from sepal_ui import sepawidgets as sw
from datetime import datetime
dp = sw.Datepicker()
# do stulff and as a fallback do
dp.v_model = datetime.today().strftime("%Y-%m-%d")
```
Instead I would love to have something like:
```python
dp.today()
```
what do you think ?
|
diff --git a/sepal_ui/sepalwidgets/inputs.py b/sepal_ui/sepalwidgets/inputs.py
index 04e69553..0cb7a9bf 100644
--- a/sepal_ui/sepalwidgets/inputs.py
+++ b/sepal_ui/sepalwidgets/inputs.py
@@ -156,6 +156,12 @@ class DatePicker(v.Layout, SepalWidget):
return
+ def today(self) -> Self:
+ """Update the date to the current day."""
+ self.v_model = datetime.today().strftime("%Y-%m-%d")
+
+ return self
+
@staticmethod
def is_valid_date(date: str) -> bool:
"""
|
12rambau__sepal_ui-774
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/inputs.py:FileInput.__init__",
"sepal_ui/sepalwidgets/inputs.py:FileInput._get_items"
],
"edited_modules": [
"sepal_ui/sepalwidgets/inputs.py:FileInput"
]
},
"file": "sepal_ui/sepalwidgets/inputs.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/sepalwidget.py:SepalWidget.get_children"
],
"edited_modules": [
"sepal_ui/sepalwidgets/sepalwidget.py:SepalWidget"
]
},
"file": "sepal_ui/sepalwidgets/sepalwidget.py"
}
] |
12rambau/sepal_ui
|
2576446debe3544f3edeb208c76f671ffc0c8650
|
Restrict maximum parent level from InputFile
I have some apps where I’m interested on only search up to certain level, i.e., module_downloads, and I think that in the most of them, the user doesn’t need to go upper from sepal_user, once they start clicking, they could easily get lost over multiple folders.
what if we implement a parameter called: max_depth? We could use int values to this parameter, what do you think?
|
diff --git a/sepal_ui/sepalwidgets/inputs.py b/sepal_ui/sepalwidgets/inputs.py
index 04e69553..cd561bb5 100644
--- a/sepal_ui/sepalwidgets/inputs.py
+++ b/sepal_ui/sepalwidgets/inputs.py
@@ -205,6 +205,9 @@ class FileInput(v.Flex, SepalWidget):
clear: Optional[v.Btn] = None
"clear btn to remove everything and set back to the ini folder"
+ root: t.Unicode = t.Unicode("").tag(sync=True)
+ "the root folder from which you cannot go higher in the tree."
+
v_model: t.Unicode = t.Unicode(None, allow_none=True).tag(sync=True)
"the v_model of the input"
@@ -218,6 +221,7 @@ class FileInput(v.Flex, SepalWidget):
label: str = ms.widgets.fileinput.label,
v_model: Union[str, None] = "",
clearable: bool = False,
+ root: Union[str, Path] = "",
**kwargs,
) -> None:
"""
@@ -229,10 +233,12 @@ class FileInput(v.Flex, SepalWidget):
label: the label of the input
v_model: the default value
clearable: wether or not to make the widget clearable. default to False
+ root: the root folder from which you cannot go higher in the tree.
kwargs: any parameter from a v.Flex abject. If set, 'children' will be overwritten.
"""
self.extentions = extentions
self.folder = Path(folder)
+ self.root = str(root) if isinstance(root, Path) else root
self.selected_file = v.TextField(
readonly=True,
@@ -441,7 +447,10 @@ class FileInput(v.Flex, SepalWidget):
folder_list = humansorted(folder_list, key=lambda x: x.value)
file_list = humansorted(file_list, key=lambda x: x.value)
+ folder_list.extend(file_list)
+ # add the parent item if root is set and is not reached yet
+ # if root is not set then we always display it
parent_item = v.ListItem(
value=str(folder.parent),
children=[
@@ -458,9 +467,11 @@ class FileInput(v.Flex, SepalWidget):
),
],
)
-
- folder_list.extend(file_list)
- folder_list.insert(0, parent_item)
+ root_folder = Path(self.root)
+ if self.root == "":
+ folder_list.insert(0, parent_item)
+ elif root_folder in folder.parents:
+ folder_list.insert(0, parent_item)
return folder_list
diff --git a/sepal_ui/sepalwidgets/sepalwidget.py b/sepal_ui/sepalwidgets/sepalwidget.py
index 79428748..d9d10451 100644
--- a/sepal_ui/sepalwidgets/sepalwidget.py
+++ b/sepal_ui/sepalwidgets/sepalwidget.py
@@ -13,7 +13,7 @@ Example:
"""
import warnings
-from typing import Optional, Union
+from typing import List, Optional, Union
import ipyvuetify as v
import traitlets as t
@@ -125,7 +125,7 @@ class SepalWidget(v.VuetifyWidget):
value: str = "",
id_: str = "",
elements: Optional[list] = None,
- ) -> list:
+ ) -> List[v.VuetifyWidget]:
r"""
Recursively search for every element matching the specifications.
|
12rambau__sepal_ui-814
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/sepalwidgets/alert.py:Alert.update_progress"
],
"edited_modules": [
"sepal_ui/sepalwidgets/alert.py:Alert"
]
},
"file": "sepal_ui/sepalwidgets/alert.py"
}
] |
12rambau/sepal_ui
|
6d825ae167f96ad2e7b76b96ca07de562f74dcf0
|
avoid to force developer to set total each time
I should be able to init the progress of an Alert first and then simply update the progress.
as in:
```python
from sepal_ui import sepalwidgets as sw
alert = sw.Alert()
# init
alert.update_progress(0, "toto", total=10)
# loop
for i in range(10):
alert.update_progress(i)
```
in the current implemetnation, total need to be set in every calls
|
diff --git a/sepal_ui/sepalwidgets/alert.py b/sepal_ui/sepalwidgets/alert.py
index 19718f51..8dafab92 100644
--- a/sepal_ui/sepalwidgets/alert.py
+++ b/sepal_ui/sepalwidgets/alert.py
@@ -108,14 +108,17 @@ class Alert(v.Alert, SepalWidget):
Args:
progress: the progress status in float
msg: The message to use before the progress bar
- tqdm_args (optional): any arguments supported by a tqdm progress bar
+ tqdm_args (optional): any arguments supported by a tqdm progress bar, they will only be taken into account after a call to ``self.reset()``.
"""
# show the alert
self.show()
- # cast the progress to float
- total = tqdm_args.get("total", 1)
+ # cast the progress to float and perform sanity checks
progress = float(progress)
+ if self.progress_output not in self.children:
+ total = tqdm_args.get("total", 1)
+ else:
+ total = self.progress_bar.total
if not (0 <= progress <= total):
raise ValueError(f"progress should be in [0, {total}], {progress} given")
|
12rambau__sepal_ui-896
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"sepal_ui/planetapi/planet_model.py:PlanetModel.init_session",
"sepal_ui/planetapi/planet_model.py:PlanetModel.get_mosaics",
"sepal_ui/planetapi/planet_model.py:PlanetModel.get_quad"
],
"edited_modules": [
"sepal_ui/planetapi/planet_model.py:PlanetModel"
]
},
"file": "sepal_ui/planetapi/planet_model.py"
},
{
"changes": {
"added_entities": [
"sepal_ui/planetapi/planet_view.py:PlanetView.validate_secret_file",
"sepal_ui/planetapi/planet_view.py:PlanetView.set_initial_method"
],
"added_modules": null,
"edited_entities": [
"sepal_ui/planetapi/planet_view.py:PlanetView.__init__",
"sepal_ui/planetapi/planet_view.py:PlanetView.reset",
"sepal_ui/planetapi/planet_view.py:PlanetView._swap_inputs",
"sepal_ui/planetapi/planet_view.py:PlanetView.validate"
],
"edited_modules": [
"sepal_ui/planetapi/planet_view.py:PlanetView"
]
},
"file": "sepal_ui/planetapi/planet_view.py"
}
] |
12rambau/sepal_ui
|
b91b2a2c45b4fa80a7a0c699df978ebc46682260
|
get_mosaics from planet api fails
`get_mosaics` --and probably-- `get_quads` fails when the authentication process is done `from_login`... test has been passed because we are only testing the initialization of the `Planet` `from_key` but not to get elements from it
|
diff --git a/sepal_ui/message/en/locale.json b/sepal_ui/message/en/locale.json
index abdea912..b65a232d 100644
--- a/sepal_ui/message/en/locale.json
+++ b/sepal_ui/message/en/locale.json
@@ -85,14 +85,17 @@
"exception": {
"empty": "Please fill the required field(s).",
"invalid": "Invalid email or password",
- "nosubs": "Your credentials do not have any valid planet subscription."
+ "nosubs": "Your credentials do not have any valid planet subscription.",
+ "no_secret_file": "The credentials file does not exist, use a different login method."
},
"widget": {
"username": "Planet username",
"password": "Planet password",
"apikey": "Planet API key",
+ "store": "Remember credentials file in the session.",
"method": {
"label": "Login method",
+ "from_file": "From saved credentials",
"credentials": "Credentials",
"api_key": "Planet API key"
}
diff --git a/sepal_ui/planetapi/planet_model.py b/sepal_ui/planetapi/planet_model.py
index 7aee2ed4..c3939499 100644
--- a/sepal_ui/planetapi/planet_model.py
+++ b/sepal_ui/planetapi/planet_model.py
@@ -6,8 +6,8 @@ from typing import Dict, List, Optional, Union
import nest_asyncio
import planet.data_filter as filters
-import requests
import traitlets as t
+from deprecated.sphinx import deprecated
from planet import DataClient
from planet.auth import Auth
from planet.exceptions import NoPermission
@@ -21,7 +21,6 @@ nest_asyncio.apply()
class PlanetModel(Model):
-
SUBS_URL: str = (
"https://api.planet.com/auth/v1/experimental/public/my/subscriptions"
)
@@ -56,11 +55,18 @@ class PlanetModel(Model):
if credentials:
self.init_session(credentials)
- def init_session(self, credentials: Union[str, List[str]]) -> None:
+ @deprecated(
+ version="3.0",
+ reason="credentials member is deprecated, use self.auth._key instead",
+ )
+ def init_session(
+ self, credentials: Union[str, List[str]], write_secrets: bool = False
+ ) -> None:
"""Initialize planet client with api key or credentials. It will handle errors.
Args:
- credentials: planet API key or username and password pair of planet explorer.
+ credentials: planet API key, username and password pair or a secrets planet.json file.
+ write_secrets: either to write the credentials in the secret file or not. Defaults to True.
"""
if isinstance(credentials, str):
credentials = [credentials]
@@ -70,13 +76,21 @@ class PlanetModel(Model):
if len(credentials) == 2:
self.auth = Auth.from_login(*credentials)
+
+ # Check if the str is a path to a secret file
+ elif len(credentials) == 1 and credentials[0].endswith(".json"):
+ self.auth = Auth.from_file(credentials[0])
+
else:
self.auth = Auth.from_key(credentials[0])
- self.credentials = credentials
+ self.credentials = self.auth._key
self.session = Session(auth=self.auth)
self._is_active()
+ if self.active and write_secrets:
+ self.auth.store()
+
return
def _is_active(self) -> None:
@@ -213,10 +227,11 @@ class PlanetModel(Model):
"quad_download": true
}
"""
- url = "https://api.planet.com/basemaps/v1/mosaics?api_key={}"
- res = requests.get(url.format(self.credentials[0]))
+ mosaics_url = "https://api.planet.com/basemaps/v1/mosaics"
+ request = self.session.request("GET", mosaics_url)
+ response = asyncio.run(request)
- return res.json().get("mosaics", [])
+ return response.json().get("mosaics", [])
def get_quad(self, mosaic: dict, quad_id: str) -> dict:
"""Get a quad response for a specific mosaic and quad.
@@ -245,10 +260,13 @@ class PlanetModel(Model):
"percent_covered": 100
}
"""
- url = "https://api.planet.com/basemaps/v1/mosaics/{}/quads/{}?api_key={}"
- res = requests.get(url.format(mosaic["id"], quad_id, self.credentials[0]))
+ quads_url = "https://api.planet.com/basemaps/v1/mosaics/{}/quads/{}"
+ quads_url = quads_url.format(mosaic["id"], quad_id)
+
+ request = self.session.request("GET", quads_url)
+ response = asyncio.run(request)
- return res.json() or {}
+ return response.json() or {}
@staticmethod
def search_status(d: dict) -> List[Dict[str, bool]]:
diff --git a/sepal_ui/planetapi/planet_view.py b/sepal_ui/planetapi/planet_view.py
index 1de11832..72f3d771 100644
--- a/sepal_ui/planetapi/planet_view.py
+++ b/sepal_ui/planetapi/planet_view.py
@@ -1,5 +1,6 @@
"""The ``Card`` widget to use in application to interface with Planet."""
+from pathlib import Path
from typing import Optional
import ipyvuetify as v
@@ -12,7 +13,6 @@ from sepal_ui.scripts.decorator import loading_button
class PlanetView(sw.Layout):
-
planet_model: Optional[PlanetModel] = None
"Backend model to manipulate interface actions"
@@ -47,7 +47,7 @@ class PlanetView(sw.Layout):
):
"""Stand-alone interface to capture planet lab credentials.
- It also validate its subscription and connect to the client stored in the model.
+ It also validate its subscription and connect to the client from_file in the model.
Args:
btn (sw.Btn, optional): Button to trigger the validation process in the associated model.
@@ -67,18 +67,27 @@ class PlanetView(sw.Layout):
)
self.w_password = sw.PasswordField(label=ms.planet.widget.password)
self.w_key = sw.PasswordField(label=ms.planet.widget.apikey, v_model="").hide()
+ self.w_secret_file = sw.TextField(
+ label=ms.planet.widget.store,
+ v_model=str(Path.home() / ".planet.json"),
+ readonly=True,
+ class_="mr-2",
+ ).hide()
self.w_info_view = InfoView(model=self.planet_model)
self.w_method = v.Select(
label=ms.planet.widget.method.label,
class_="mr-2",
- v_model="credentials",
+ v_model="",
items=[
+ {"value": "from_file", "text": ms.planet.widget.method.from_file},
{"value": "credentials", "text": ms.planet.widget.method.credentials},
{"value": "api_key", "text": ms.planet.widget.method.api_key},
],
)
+ self.w_store = sw.Checkbox(label=ms.planet.widget.store, v_model=True)
+
w_validation = v.Flex(
style_="flex-grow: 0 !important;",
children=[self.btn],
@@ -87,17 +96,22 @@ class PlanetView(sw.Layout):
self.children = [
self.w_method,
sw.Layout(
+ attributes={"id": "planet_credentials"},
class_="align-center",
children=[
self.w_username,
self.w_password,
self.w_key,
+ self.w_secret_file,
],
),
+ self.w_store,
]
if not btn:
- self.children[-1].set_children(w_validation, "last")
+ self.get_children(attr="id", value="planet_credentials")[0].set_children(
+ w_validation, "last"
+ )
# Set it here to avoid displacements when using button
self.set_children(self.w_info_view, "last")
@@ -108,36 +122,82 @@ class PlanetView(sw.Layout):
self.w_method.observe(self._swap_inputs, "v_model")
self.btn.on_event("click", self.validate)
+ self.set_initial_method()
+
+ def validate_secret_file(self) -> None:
+ """Validate the secret file path."""
+ if not Path(self.w_secret_file.v_model).exists():
+ self.w_secret_file.error_messages = [ms.planet.exception.no_secret_file]
+ return False
+
+ self.w_secret_file.error_messages = []
+ return True
+
+ def set_initial_method(self) -> None:
+ """Set the initial method to connect to planet lab."""
+ self.w_method.v_model = (
+ "from_file" if self.validate_secret_file() else "credentials"
+ )
+
def reset(self) -> None:
"""Empty credentials fields and restart activation mode."""
- self.w_username.v_model = None
- self.w_password.v_model = None
- self.w_key.v_model = None
+ self.w_username.v_model = ""
+ self.w_password.v_model = ""
+ self.w_key.v_model = ""
self.planet_model.__init__()
return
def _swap_inputs(self, change: dict) -> None:
- """Swap between credentials and api key inputs."""
+ """Swap between credentials and api key inputs.
+
+ Args:
+ change.new: values of from_file, credentials, api_key
+ """
self.alert.reset()
self.reset()
- self.w_username.toggle_viz()
- self.w_password.toggle_viz()
- self.w_key.toggle_viz()
+ # small detail, but validate the file every time the method is changed
+ self.validate_secret_file()
+
+ if change["new"] == "credentials":
+ self.w_username.show()
+ self.w_password.show()
+ self.w_secret_file.hide()
+ self.w_store.show()
+ self.w_key.hide()
+
+ elif change["new"] == "api_key":
+ self.w_username.hide()
+ self.w_password.hide()
+ self.w_secret_file.hide()
+ self.w_store.show()
+ self.w_key.show()
+ else:
+ self.w_username.hide()
+ self.w_password.hide()
+ self.w_key.hide()
+ self.w_store.hide()
+ self.w_secret_file.show()
return
- @loading_button(debug=True)
+ @loading_button()
def validate(self, *args) -> None:
"""Initialize planet client and validate if is active."""
self.planet_model.__init__()
if self.w_method.v_model == "credentials":
credentials = [self.w_username.v_model, self.w_password.v_model]
+
+ elif self.w_method.v_model == "api_key":
+ credentials = self.w_key.v_model
+
else:
- credentials = [self.w_key.v_model]
+ if not self.validate_secret_file():
+ raise Exception(ms.planet.exception.no_secret_file)
+ credentials = self.w_secret_file.v_model
- self.planet_model.init_session(credentials)
+ self.planet_model.init_session(credentials, write_secrets=self.w_store.v_model)
return
|
15five__scim2-filter-parser-13
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": null,
"edited_modules": null
},
"file": "setup.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"src/scim2_filter_parser/transpilers/sql.py:Transpiler.visit_AttrPath"
],
"edited_modules": [
"src/scim2_filter_parser/transpilers/sql.py:Transpiler"
]
},
"file": "src/scim2_filter_parser/transpilers/sql.py"
}
] |
15five/scim2-filter-parser
|
3ed1858b492542d0bc9b9e9ab9547641595e28c1
|
Return NamedTuple rather than tuple.
It would be nice to return a NamedTuple instead of a tuple here:
https://github.com/15five/scim2-filter-parser/blob/7ddc216f8c3dd1cdb2152944187e8f7f5ee07be2/src/scim2_filter_parser/transpilers/sql.py#L148
This way parts of each path could be accessed by name rather than by index in the tuple.
|
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 12a5d4f..178f172 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,6 +1,10 @@
CHANGE LOG
==========
+0.3.5
+-----
+- Update the sql.Transpiler to collect namedtuples rather than tuples for attr paths
+
0.3.4
-----
- Update tox.ini and clean up linting errors
diff --git a/setup.py b/setup.py
index bbf57bf..bd16f70 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@ def long_description():
setup(
name='scim2-filter-parser',
- version='0.3.4',
+ version='0.3.5',
description='A customizable parser/transpiler for SCIM2.0 filters',
url='https://github.com/15five/scim2-filter-parser',
maintainer='Paul Logston',
diff --git a/src/scim2_filter_parser/transpilers/sql.py b/src/scim2_filter_parser/transpilers/sql.py
index 6254f1e..2107758 100644
--- a/src/scim2_filter_parser/transpilers/sql.py
+++ b/src/scim2_filter_parser/transpilers/sql.py
@@ -4,9 +4,12 @@ clause based on a SCIM filter.
"""
import ast
import string
+import collections
from .. import ast as scim2ast
+AttrPath = collections.namedtuple('AttrPath', ['attr_name', 'sub_attr', 'uri'])
+
class Transpiler(ast.NodeTransformer):
"""
@@ -145,7 +148,7 @@ class Transpiler(ast.NodeTransformer):
# Convert attr_name to another value based on map.
# Otherwise, return None.
- attr_path_tuple = (attr_name_value, sub_attr_value, uri_value)
+ attr_path_tuple = AttrPath(attr_name_value, sub_attr_value, uri_value)
self.attr_paths.append(attr_path_tuple)
return self.attr_map.get(attr_path_tuple)
|
15five__scim2-filter-parser-20
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": null,
"edited_modules": [
"src/scim2_filter_parser/parser.py:SCIMParser"
]
},
"file": "src/scim2_filter_parser/parser.py"
}
] |
15five/scim2-filter-parser
|
08de23c5626556a37beced764a22a2fa7021989b
|
Issue when using multiple "or" or "and"
Hi,
I am facing an issue, where the query having two or more "and" or more than two "or" is failing.
Have a look at examples below: -
1)```"displayName co \"username\" or nickName co \"username\" or userName co \"username\""```
```"displayName co \"username\" and nickName co \"username\" and userName co \"username\""```
the two queries fails giving ,
```scim2_filter_parser.parser.SCIMParserError: Parsing error at: Token(type='OR', value='or', lineno=1, index=52)```
notice above queries are having either only "or" or "and".
2)```"displayName co \"username\" and nickName co \"username\" or userName co \"username\""```
but this query works.
|
diff --git a/src/scim2_filter_parser/parser.py b/src/scim2_filter_parser/parser.py
index 516f65d..12c693e 100644
--- a/src/scim2_filter_parser/parser.py
+++ b/src/scim2_filter_parser/parser.py
@@ -110,9 +110,8 @@ class SCIMParser(Parser):
# which takes precedence over "or"
# 3. Attribute operators
precedence = (
- ('nonassoc', OR), # noqa F821
- ('nonassoc', AND), # noqa F821
- ('nonassoc', NOT), # noqa F821
+ ('left', OR, AND), # noqa F821
+ ('right', NOT), # noqa F821
)
# FILTER = attrExp / logExp / valuePath / *1"not" "(" FILTER ")"
|
15five__scim2-filter-parser-31
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": null,
"edited_modules": null
},
"file": "setup.py"
},
{
"changes": {
"added_entities": [
"src/scim2_filter_parser/ast.py:AttrPath.case_insensitive",
"src/scim2_filter_parser/ast.py:AttrExpr.case_insensitive"
],
"added_modules": null,
"edited_entities": null,
"edited_modules": [
"src/scim2_filter_parser/ast.py:AttrPath",
"src/scim2_filter_parser/ast.py:AttrExpr"
]
},
"file": "src/scim2_filter_parser/ast.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"src/scim2_filter_parser/transpilers/django_q_object.py:Transpiler.is_filter",
"src/scim2_filter_parser/transpilers/django_q_object.py:Transpiler.visit_AttrExpr",
"src/scim2_filter_parser/transpilers/django_q_object.py:Transpiler.visit_AttrExprValue"
],
"edited_modules": [
"src/scim2_filter_parser/transpilers/django_q_object.py:Transpiler"
]
},
"file": "src/scim2_filter_parser/transpilers/django_q_object.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"src/scim2_filter_parser/transpilers/sql.py:Transpiler.visit_AttrExpr",
"src/scim2_filter_parser/transpilers/sql.py:Transpiler.visit_AttrExprValue"
],
"edited_modules": [
"src/scim2_filter_parser/transpilers/sql.py:Transpiler"
]
},
"file": "src/scim2_filter_parser/transpilers/sql.py"
}
] |
15five/scim2-filter-parser
|
c794bf3e50e3cb71bdcf919feb43d11912907dd2
|
userName attribute should be case-insensitive, per the RFC
From https://github.com/15five/django-scim2/issues/76
> See https://datatracker.ietf.org/doc/html/rfc7643#section-4.1.1: (userName)
> This attribute is REQUIRED and is case insensitive.
> Currently this case-insensitive behavior is not implemented and the filter lookups are case-sensitive.
|
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 14f28e6..35eb5c5 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,5 +1,12 @@
CHANGE LOG
==========
+0.4.0
+-----
+- Update userName to be case insensitive. #31
+
+BREAKING CHANGE: This allows queries that did not match rows before to
+match rows now!
+
0.3.9
-----
diff --git a/setup.py b/setup.py
index fa62e03..c46c582 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@ def long_description():
setup(
name='scim2-filter-parser',
- version='0.3.9',
+ version='0.4.0',
description='A customizable parser/transpiler for SCIM2.0 filters',
url='https://github.com/15five/scim2-filter-parser',
maintainer='Paul Logston',
diff --git a/src/scim2_filter_parser/ast.py b/src/scim2_filter_parser/ast.py
index b019f01..28de65e 100644
--- a/src/scim2_filter_parser/ast.py
+++ b/src/scim2_filter_parser/ast.py
@@ -95,6 +95,12 @@ class AttrPath(AST):
sub_attr : (SubAttr, type(None)) # noqa: E203
uri : (str, type(None)) # noqa: E203
+ @property
+ def case_insensitive(self):
+ # userName is always case-insensitive
+ # https://datatracker.ietf.org/doc/html/rfc7643#section-4.1.1
+ return self.attr_name == 'userName'
+
class CompValue(AST):
value : str # noqa: E203
@@ -105,6 +111,10 @@ class AttrExpr(AST):
attr_path : AttrPath # noqa: E203
comp_value : CompValue # noqa: E203
+ @property
+ def case_insensitive(self):
+ return self.attr_path.case_insensitive
+
# The following classes for visiting and rewriting the AST are taken
# from Python's ast module. It's really easy to make mistakes when
diff --git a/src/scim2_filter_parser/transpilers/django_q_object.py b/src/scim2_filter_parser/transpilers/django_q_object.py
index def4633..5ef90b2 100644
--- a/src/scim2_filter_parser/transpilers/django_q_object.py
+++ b/src/scim2_filter_parser/transpilers/django_q_object.py
@@ -139,13 +139,13 @@ class Transpiler(ast.NodeTransformer):
partial = partial.replace(".", "__")
if full and partial:
# Specific to Azure
- op, value = self.visit_AttrExprValue(node.value, node.comp_value)
+ op, value = self.visit_AttrExprValue(node)
key = partial + "__" + op
return full & Q(**{key: value})
elif full:
return full
elif partial:
- op, value = self.visit_AttrExprValue(node.value, node.comp_value)
+ op, value = self.visit_AttrExprValue(node)
key = partial + "__" + op
return Q(**{key: value})
else:
@@ -159,20 +159,20 @@ class Transpiler(ast.NodeTransformer):
return None
if "." in attr:
attr = attr.replace(".", "__")
- op, value = self.visit_AttrExprValue(node.value, node.comp_value)
+ op, value = self.visit_AttrExprValue(node)
key = attr + "__" + op
query = Q(**{key: value})
if node.value == "ne":
query = ~query
return query
- def visit_AttrExprValue(self, node_value, node_comp_value):
- op = self.lookup_op(node_value)
+ def visit_AttrExprValue(self, node):
+ op = self.lookup_op(node.value)
- if node_comp_value:
+ if node.comp_value:
# There is a comp_value, so visit node and build SQL.
# prep item_id to be a str replacement placeholder
- value = self.visit(node_comp_value)
+ value = self.visit(node.comp_value)
else:
value = None
diff --git a/src/scim2_filter_parser/transpilers/sql.py b/src/scim2_filter_parser/transpilers/sql.py
index 0c3fba4..7128edb 100644
--- a/src/scim2_filter_parser/transpilers/sql.py
+++ b/src/scim2_filter_parser/transpilers/sql.py
@@ -112,28 +112,38 @@ class Transpiler(ast.NodeTransformer):
if isinstance(node.attr_path.attr_name, scim2ast.Filter):
full, partial = self.visit_PartialAttrExpr(node.attr_path.attr_name)
if full and partial:
- value = self.visit_AttrExprValue(node.value, node.comp_value)
+ value = self.visit_AttrExprValue(node)
return f'({full} AND {partial} {value})'
elif full:
return full
elif partial:
- value = self.visit_AttrExprValue(node.value, node.comp_value)
+ value = self.visit_AttrExprValue(node)
return f'{partial} {value}'
else:
return None
else:
+ # Case-insensitivity only needs to be checked in this branch
+ # because userName is currently the only attribute that can be case
+ # insensitive and userName can not be a nested part of a complex query (eg.
+ # emails.type in emails[type eq "Primary"]...).
+ # https://datatracker.ietf.org/doc/html/rfc7643#section-4.1.1
attr = self.visit(node.attr_path)
if attr is None:
return None
- value = self.visit_AttrExprValue(node.value, node.comp_value)
+
+ value = self.visit_AttrExprValue(node)
+
+ if node.case_insensitive:
+ return f'UPPER({attr}) {value}'
+
return f'{attr} {value}'
- def visit_AttrExprValue(self, node_value, node_comp_value):
- op_sql = self.lookup_op(node_value)
+ def visit_AttrExprValue(self, node):
+ op_sql = self.lookup_op(node.value)
item_id = self.get_next_id()
- if not node_comp_value:
+ if not node.comp_value:
self.params[item_id] = None
return op_sql
@@ -142,15 +152,19 @@ class Transpiler(ast.NodeTransformer):
# prep item_id to be a str replacement placeholder
item_id_placeholder = '{' + item_id + '}'
- if 'LIKE' == op_sql:
+ if node.value.lower() in self.matching_op_by_scim_op.keys():
# Add appropriate % signs to values in LIKE clause
- prefix, suffix = self.lookup_like_matching(node_value)
- value = prefix + self.visit(node_comp_value) + suffix
+ prefix, suffix = self.lookup_like_matching(node.value)
+ value = prefix + self.visit(node.comp_value) + suffix
+
else:
- value = self.visit(node_comp_value)
+ value = self.visit(node.comp_value)
self.params[item_id] = value
+ if node.case_insensitive:
+ return f'{op_sql} UPPER({item_id_placeholder})'
+
return f'{op_sql} {item_id_placeholder}'
def visit_AttrPath(self, node):
|
20c__ctl-3
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"src/ctl/plugins/pypi.py:PyPIPlugin.dist_path",
"src/ctl/plugins/pypi.py:PyPIPlugin.prepare"
],
"edited_modules": [
"src/ctl/plugins/pypi.py:PyPIPluginConfig",
"src/ctl/plugins/pypi.py:PyPIPlugin"
]
},
"file": "src/ctl/plugins/pypi.py"
},
{
"changes": {
"added_entities": [
"src/ctl/plugins/release.py:ReleasePlugin.set_repository"
],
"added_modules": null,
"edited_entities": [
"src/ctl/plugins/release.py:ReleasePlugin.add_arguments",
"src/ctl/plugins/release.py:ReleasePlugin.execute",
"src/ctl/plugins/release.py:ReleasePlugin.set_target"
],
"edited_modules": [
"src/ctl/plugins/release.py:ReleasePluginConfig",
"src/ctl/plugins/release.py:ReleasePlugin"
]
},
"file": "src/ctl/plugins/release.py"
}
] |
20c/ctl
|
879af37647e61767a1ede59ffd353e4cfd27cd6f
|
PyPI plugin: `target` config attribute should be `repository`
This is so it's in line with the version plugin, which currently uses `repository` to specify the target repository
The pypi plugin currently uses `repository` to specify which PyPI repository to use, this should change to `pypi_repository` as well.
Should do this before tagging 1.0.0 since it's a config schema change
|
diff --git a/src/ctl/plugins/pypi.py b/src/ctl/plugins/pypi.py
index 5d979af..a6117af 100644
--- a/src/ctl/plugins/pypi.py
+++ b/src/ctl/plugins/pypi.py
@@ -32,7 +32,7 @@ class PyPIPluginConfig(release.ReleasePluginConfig):
config_file = confu.schema.Str(help="path to pypi config file (e.g. ~/.pypirc)")
# PyPI repository name, needs to exist in your pypi config file
- repository = confu.schema.Str(
+ pypi_repository = confu.schema.Str(
help="PyPI repository name - needs to exist " "in your pypi config file",
default="pypi",
)
@@ -55,16 +55,16 @@ class PyPIPlugin(release.ReleasePlugin):
@property
def dist_path(self):
- return os.path.join(self.target.checkout_path, "dist", "*")
+ return os.path.join(self.repository.checkout_path, "dist", "*")
def prepare(self):
super(PyPIPlugin, self).prepare()
self.shell = True
- self.repository = self.get_config("repository")
+ self.pypi_repository = self.get_config("pypi_repository")
self.pypirc_path = os.path.expanduser(self.config.get("config_file"))
self.twine_settings = Settings(
config_file=self.pypirc_path,
- repository_name=self.repository,
+ repository_name=self.pypi_repository,
sign=self.get_config("sign"),
identity=self.get_config("identity"),
sign_with=self.get_config("sign_with"),
diff --git a/src/ctl/plugins/release.py b/src/ctl/plugins/release.py
index bcfa1ce..dcae2f4 100644
--- a/src/ctl/plugins/release.py
+++ b/src/ctl/plugins/release.py
@@ -18,8 +18,8 @@ import ctl.plugins.git
class ReleasePluginConfig(confu.schema.Schema):
- target = confu.schema.Str(
- help="target for release - should be a path "
+ repository = confu.schema.Str(
+ help="repository target for release - should be a path "
"to a python package or the name of a "
"repository type plugin",
cli=False,
@@ -46,16 +46,16 @@ class ReleasePlugin(command.CommandPlugin):
"version",
nargs=1,
type=str,
- help="release version - if target is managed by git, "
+ help="release version - if repository is managed by git, "
"checkout this branch/tag",
)
group.add_argument(
- "target",
+ "repository",
nargs="?",
type=str,
- default=plugin_config.get("target"),
- help=ReleasePluginConfig().target.help,
+ default=plugin_config.get("repository"),
+ help=ReleasePluginConfig().repository.help,
)
sub = parser.add_subparsers(title="Operation", dest="op")
@@ -74,7 +74,7 @@ class ReleasePlugin(command.CommandPlugin):
return {
"group": group,
- "confu_target": op_release_parser,
+ "confu_repository": op_release_parser,
"op_release_parser": op_release_parser,
"op_validate_parser": op_validate_parser,
}
@@ -84,48 +84,48 @@ class ReleasePlugin(command.CommandPlugin):
self.prepare()
self.shell = True
- self.set_target(self.get_config("target"))
+ self.set_repository(self.get_config("repository"))
self.dry_run = kwargs.get("dry")
self.version = kwargs.get("version")[0]
- self.orig_branch = self.target.branch
+ self.orig_branch = self.repository.branch
if self.dry_run:
self.log.info("Doing dry run...")
- self.log.info("Release target: {}".format(self.target))
+ self.log.info("Release repository: {}".format(self.repository))
try:
- self.target.checkout(self.version)
+ self.repository.checkout(self.version)
op = self.get_op(kwargs.get("op"))
op(**kwargs)
finally:
- self.target.checkout(self.orig_branch)
+ self.repository.checkout(self.orig_branch)
- def set_target(self, target):
- if not target:
- raise ValueError("No target specified")
+ def set_repository(self, repository):
+ if not repository:
+ raise ValueError("No repository specified")
try:
- self.target = self.other_plugin(target)
- if not isinstance(self.target, ctl.plugins.repository.RepositoryPlugin):
+ self.repository = self.other_plugin(repository)
+ if not isinstance(self.repository, ctl.plugins.repository.RepositoryPlugin):
raise TypeError(
"The plugin with the name `{}` is not a "
"repository type plugin and cannot be used "
- "as a target".format(target)
+ "as a repository".format(repository)
)
except KeyError:
- self.target = os.path.abspath(target)
- if not os.path.exists(self.target):
+ self.repository = os.path.abspath(repository)
+ if not os.path.exists(self.repository):
raise IOError(
"Target is neither a configured repository "
"plugin nor a valid file path: "
- "{}".format(self.target)
+ "{}".format(self.repository)
)
- self.target = ctl.plugins.git.temporary_plugin(
- self.ctl, "{}__tmp_repo".format(self.plugin_name), self.target
+ self.repository = ctl.plugins.git.temporary_plugin(
+ self.ctl, "{}__tmp_repo".format(self.plugin_name), self.repository
)
- self.cwd = self.target.checkout_path
+ self.cwd = self.repository.checkout_path
@expose("ctl.{plugin_name}.release")
def release(self, **kwargs):
|
2gis__k8s-handle-120
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"k8s_handle/templating.py:Renderer._evaluate_tags"
],
"edited_modules": [
"k8s_handle/templating.py:Renderer"
]
},
"file": "k8s_handle/templating.py"
}
] |
2gis/k8s-handle
|
0ce48ecc5cd78eac5894241468a53080c3ccec64
|
skip-tags does not work
Hello,
it seems `--skip-tags` does not work. Steps to reproduce:
```
git clone git@github.com:2gis/k8s-handle-example.git
```
Edit config.yaml, add some tag
```
staging:
templates:
- template: configmap.yaml.j2
- template: deployment.yaml.j2
- template: service.yaml.j2
tags: manual
```
Render is empty:
```
$ docker run --rm -v `pwd`:/tmp -w /tmp 2gis/k8s-handle k8s-handle render -s staging --skip-tags manual
_(_)_ wWWWw _
@@@@ (_)@(_) vVVVv _ @@@@ (___) _(_)_
@@()@@ wWWWw (_)\ (___) _(_)_ @@()@@ Y (_)@(_)
@@@@ (___) `|/ Y (_)@(_) @@@@ \|/ (_)
/ Y \| \|/ /(_) \| |/ |
\ | \ |/ | / \ | / \|/ |/ \| \|/
\|// \|/// \|// \|/// \|/// \|// |// \|//
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
```
|
diff --git a/k8s_handle/templating.py b/k8s_handle/templating.py
index 7f2d6b7..445a09e 100644
--- a/k8s_handle/templating.py
+++ b/k8s_handle/templating.py
@@ -195,7 +195,10 @@ class Renderer:
@staticmethod
def _evaluate_tags(tags, only_tags, skip_tags):
- if only_tags is None and skip_tags is None:
- return True
+ if only_tags and tags.isdisjoint(only_tags):
+ return False
- return tags.isdisjoint(skip_tags or []) and not tags.isdisjoint(only_tags or tags)
+ if skip_tags and not tags.isdisjoint(skip_tags):
+ return False
+
+ return True
|
2gis__k8s-handle-73
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"k8s_handle/config.py:_process_variable"
],
"edited_modules": [
"k8s_handle/config.py:_process_variable"
]
},
"file": "k8s_handle/config.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"k8s_handle/filesystem.py:load_yaml"
],
"edited_modules": [
"k8s_handle/filesystem.py:load_yaml"
]
},
"file": "k8s_handle/filesystem.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"k8s_handle/templating.py:get_template_contexts"
],
"edited_modules": [
"k8s_handle/templating.py:get_template_contexts"
]
},
"file": "k8s_handle/templating.py"
}
] |
2gis/k8s-handle
|
dec5c73ec1bcd694bd45651901d68cd933721b3e
|
It is not possible to concatenate several environment variables into one value
Привет.
Столкнулся с невозможностью одновременного использования нескольких переменных окружения при объявлении переменной в config.yaml. Небольшой пример:
Мы при развертывании сервиса иногда создаем более одного деплоя, registry используем как приватные так и публичные, да еще и registry приватных у нас несколько. Так же при каждом деплое у нас выбирается образ по тэгу в git. Для того, что бы не создавать множество шаблонов деплоя, мы сделали один универсальный, в цикле которого добавляем все необходимые образы. По этому все образы объявляются в config.yaml
```yaml
common:
deployments:
service_name:
containers:
app:
image: "{{ env='CI_REGISTRY' }}/service-image:{{ env='TAG' }}"
nginx:
image: "{{ env='CI_REGISTRY' }}/custom-nginx:configmap"
```
Так вот при такой записи в случае с образом nginx все ОК, в случае с контейнером app после создания манифеста имеем "{{ env='CI_REGISTRY' }}/service-image:1.0.0". Заглянул в код и понял, что в текущей реализации не получится из нескольких переменных окружения получить нужное нам имя образа.
|
diff --git a/.dockerignore b/.dockerignore
index d3f33a2..fea1e67 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -6,5 +6,10 @@ Dockerfile*
.gitignore
.idea/
.tox/
+.travis.yml
+tox.ini
__pychache__
htmlcov/
+tests/
+*.png
+
diff --git a/k8s_handle/config.py b/k8s_handle/config.py
index acbf34f..6c2564f 100644
--- a/k8s_handle/config.py
+++ b/k8s_handle/config.py
@@ -13,7 +13,7 @@ from k8s_handle.templating import b64decode
log = logging.getLogger(__name__)
INCLUDE_RE = re.compile(r'{{\s?file\s?=\s?\'(?P<file>[^\']*)\'\s?}}')
-CUSTOM_ENV_RE = re.compile(r'^(?P<prefix>.*){{\s*env\s*=\s*\'(?P<env>[^\']*)\'\s*}}(?P<postfix>.*)$') # noqa
+CUSTOM_ENV_RE = r'{{\s*env\s*=\s*\'([^\']*)\'\s*}}'
KEY_USE_KUBECONFIG = 'use_kubeconfig'
KEY_K8S_MASTER_URI = 'k8s_master_uri'
@@ -113,19 +113,15 @@ def _process_variable(variable):
if matches:
return load_yaml(matches.groupdict().get('file'))
- matches = CUSTOM_ENV_RE.match(variable)
+ try:
+ return re.sub(CUSTOM_ENV_RE, lambda m: os.environ[m.group(1)], variable)
- if matches:
- prefix = matches.groupdict().get('prefix')
- env_var_name = matches.groupdict().get('env')
- postfix = matches.groupdict().get('postfix')
-
- if os.environ.get(env_var_name) is None and settings.GET_ENVIRON_STRICT:
- raise RuntimeError('Environment variable "{}" is not set'.format(env_var_name))
-
- return prefix + os.environ.get(env_var_name, '') + postfix
+ except KeyError as err:
+ log.debug('Environment variable "{}" is not set'.format(err.args[0]))
+ if settings.GET_ENVIRON_STRICT:
+ raise RuntimeError('Environment variable "{}" is not set'.format(err.args[0]))
- return variable
+ return re.sub(CUSTOM_ENV_RE, lambda m: os.environ.get(m.group(1), ''), variable)
def _update_single_variable(value, include_history):
diff --git a/k8s_handle/filesystem.py b/k8s_handle/filesystem.py
index 5d200eb..7993290 100644
--- a/k8s_handle/filesystem.py
+++ b/k8s_handle/filesystem.py
@@ -17,7 +17,7 @@ class InvalidYamlError(Exception):
def load_yaml(path):
try:
with open(path) as f:
- return yaml.load(f.read())
+ return yaml.safe_load(f.read())
except Exception as e:
raise InvalidYamlError("file '{}' doesn't contain valid yaml: {}".format(
path, e))
diff --git a/k8s_handle/templating.py b/k8s_handle/templating.py
index 2a5c441..7476f07 100644
--- a/k8s_handle/templating.py
+++ b/k8s_handle/templating.py
@@ -19,7 +19,7 @@ log = logging.getLogger(__name__)
def get_template_contexts(file_path):
with open(file_path) as f:
try:
- contexts = yaml.load_all(f.read())
+ contexts = yaml.safe_load_all(f.read())
except Exception as e:
raise RuntimeError('Unable to load yaml file: {}, {}'.format(file_path, e))
|
2gis__k8s-handle-84
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"k8s_handle/config.py:load_context_section"
],
"edited_modules": [
"k8s_handle/config.py:load_context_section"
]
},
"file": "k8s_handle/config.py"
}
] |
2gis/k8s-handle
|
92f764f44301bcd406d588a4db5cf0333fc1ccc2
|
Empty section passes validation
Empty section string (-s "") passes validation and causes not wrapped KeyError.
|
diff --git a/k8s_handle/config.py b/k8s_handle/config.py
index df08de4..17cdb33 100644
--- a/k8s_handle/config.py
+++ b/k8s_handle/config.py
@@ -156,6 +156,9 @@ def _update_context_recursively(context, include_history=[]):
def load_context_section(section):
+ if not section:
+ raise RuntimeError('Empty section specification is not allowed')
+
if section == settings.COMMON_SECTION_NAME:
raise RuntimeError('Section "{}" is not intended to deploy'.format(settings.COMMON_SECTION_NAME))
@@ -163,7 +166,8 @@ def load_context_section(section):
if context is None:
raise RuntimeError('Config file "{}" is empty'.format(settings.CONFIG_FILE))
- if section and section not in context:
+
+ if section not in context:
raise RuntimeError('Section "{}" not found in config file "{}"'.format(section, settings.CONFIG_FILE))
# delete all sections except common and used section
|
3YOURMIND__django-migration-linter-113
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"django_migration_linter/migration_linter.py:MigrationLinter.read_migrations_list",
"django_migration_linter/migration_linter.py:MigrationLinter._gather_migrations_git",
"django_migration_linter/migration_linter.py:MigrationLinter._gather_all_migrations"
],
"edited_modules": [
"django_migration_linter/migration_linter.py:MigrationLinter"
]
},
"file": "django_migration_linter/migration_linter.py"
}
] |
3YOURMIND/django-migration-linter
|
799957a5564e8ca1ea20d7cf643abbc21db4e40f
|
Bug: --include-migrations-from argument being ignored
In version 2.2.2, using the `--include-migration-from` argument and specifying a migration .py file will not work and `lintmigrations` will run on all migration files.
On [line 299](https://github.com/3YOURMIND/django-migration-linter/blob/799957a5564e8ca1ea20d7cf643abbc21db4e40f/django_migration_linter/migration_linter.py#L299) of `migration_linter.py` the method `is_migration_file` is being called with every line of the `migrations_file_path` file instead of the filename `migrations_file_path`
|
diff --git a/django_migration_linter/migration_linter.py b/django_migration_linter/migration_linter.py
index 31f8fea..c5ea333 100644
--- a/django_migration_linter/migration_linter.py
+++ b/django_migration_linter/migration_linter.py
@@ -289,8 +289,13 @@ class MigrationLinter(object):
@classmethod
def read_migrations_list(cls, migrations_file_path):
+ """
+ Returning an empty list is different from returning None here.
+ None: no file was specified and we should consider all migrations
+ Empty list: no migration found in the file and we should consider no migration
+ """
if not migrations_file_path:
- return []
+ return None
migrations = []
try:
@@ -300,7 +305,14 @@ class MigrationLinter(object):
app_label, name = split_migration_path(line)
migrations.append((app_label, name))
except IOError:
- logger.warning("Migrations list path not found %s", migrations_file_path)
+ logger.exception("Migrations list path not found %s", migrations_file_path)
+ raise Exception("Error while reading migrations list file")
+
+ if not migrations:
+ logger.info(
+ "No valid migration paths found in the migrations file %s",
+ migrations_file_path,
+ )
return migrations
def _gather_migrations_git(self, git_commit_id, migrations_list=None):
@@ -315,7 +327,7 @@ class MigrationLinter(object):
# Only gather lines that include added migrations
if self.is_migration_file(line):
app_label, name = split_migration_path(line)
- if not migrations_list or (app_label, name) in migrations_list:
+ if migrations_list is None or (app_label, name) in migrations_list:
migration = self.migration_loader.disk_migrations[app_label, name]
migrations.append(migration)
diff_process.wait()
@@ -334,7 +346,7 @@ class MigrationLinter(object):
migration,
) in self.migration_loader.disk_migrations.items():
if app_label not in DJANGO_APPS_WITH_MIGRATIONS: # Prune Django apps
- if not migrations_list or (app_label, name) in migrations_list:
+ if migrations_list is None or (app_label, name) in migrations_list:
yield migration
def should_ignore_migration(self, app_label, migration_name, operations=()):
|
3YOURMIND__django-migration-linter-156
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"django_migration_linter/migration_linter.py:MigrationLinter.lint_migration",
"django_migration_linter/migration_linter.py:MigrationLinter.lint_runsql"
],
"edited_modules": [
"django_migration_linter/migration_linter.py:MigrationLinter"
]
},
"file": "django_migration_linter/migration_linter.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"django_migration_linter/sql_analyser/analyser.py:analyse_sql_statements"
],
"edited_modules": [
"django_migration_linter/sql_analyser/analyser.py:analyse_sql_statements"
]
},
"file": "django_migration_linter/sql_analyser/analyser.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"django_migration_linter/sql_analyser/base.py:BaseAnalyser.__init__",
"django_migration_linter/sql_analyser/base.py:BaseAnalyser.one_line_migration_tests",
"django_migration_linter/sql_analyser/base.py:BaseAnalyser.transaction_migration_tests",
"django_migration_linter/sql_analyser/base.py:BaseAnalyser._test_sql"
],
"edited_modules": [
"django_migration_linter/sql_analyser/base.py:BaseAnalyser"
]
},
"file": "django_migration_linter/sql_analyser/base.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": null,
"edited_modules": [
"django_migration_linter/sql_analyser/mysql.py:MySqlAnalyser"
]
},
"file": "django_migration_linter/sql_analyser/mysql.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": null,
"edited_modules": [
"django_migration_linter/sql_analyser/postgresql.py:PostgresqlAnalyser"
]
},
"file": "django_migration_linter/sql_analyser/postgresql.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": null,
"edited_modules": [
"django_migration_linter/sql_analyser/sqlite.py:SqliteAnalyser"
]
},
"file": "django_migration_linter/sql_analyser/sqlite.py"
}
] |
3YOURMIND/django-migration-linter
|
a119b4ba1fdfd27bf950e109771c6fd3e41d48dc
|
Raise backend specific deployment implications
For instance, certain operations will potentially lock a table, which can have implications during deployment (lots of operations require the table => we don't acquire the lock waiting to migrate / one we have the lock, long migration, locking the table and making production hang)
https://github.com/yandex/zero-downtime-migrations might be interesting to help out for these warnings
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 75223f0..1ee0f0b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,9 @@
* the positional argument `GIT_COMMIT_ID` becomes an optional argument with the named parameter ` --git-commit-id [GIT_COMMIT_ID]`
* the `lintmigrations` command takes now two positional arguments: `lintmigrations [app_label] [migration_name]`
+New features:
+* raise warning when create or dropping an index in a non-concurrent manner using postgresql
+
Miscellaneous:
* Add complete and working support for `toml` configuration files
* Add code coverage to the linter
diff --git a/django_migration_linter/migration_linter.py b/django_migration_linter/migration_linter.py
index 3ce96ae..c83a9ea 100644
--- a/django_migration_linter/migration_linter.py
+++ b/django_migration_linter/migration_linter.py
@@ -159,17 +159,19 @@ class MigrationLinter(object):
return
sql_statements = self.get_sql(app_label, migration_name)
- errors, ignored = analyse_sql_statements(
+ errors, ignored, warnings = analyse_sql_statements(
sql_statements,
settings.DATABASES[self.database]["ENGINE"],
self.exclude_migration_tests,
)
- err, ignored_data, warnings = self.analyse_data_migration(migration)
+ err, ignored_data, warnings_data = self.analyse_data_migration(migration)
if err:
errors += err
if ignored_data:
ignored += ignored_data
+ if warnings_data:
+ warnings += warnings_data
if self.warnings_as_errors:
errors += warnings
@@ -616,7 +618,7 @@ class MigrationLinter(object):
else:
sql_statements.append(runsql.sql)
- sql_errors, sql_ignored = analyse_sql_statements(
+ sql_errors, sql_ignored, sql_warnings = analyse_sql_statements(
sql_statements,
settings.DATABASES[self.database]["ENGINE"],
self.exclude_migration_tests,
@@ -625,6 +627,8 @@ class MigrationLinter(object):
error += sql_errors
if sql_ignored:
ignored += sql_ignored
+ if sql_warnings:
+ warning += sql_warnings
# And analysse the reverse SQL
if runsql.reversible and runsql.reverse_sql != RunSQL.noop:
@@ -644,7 +648,7 @@ class MigrationLinter(object):
else:
sql_statements.append(runsql.reverse_sql)
- sql_errors, sql_ignored = analyse_sql_statements(
+ sql_errors, sql_ignored, sql_warnings = analyse_sql_statements(
sql_statements,
settings.DATABASES[self.database]["ENGINE"],
self.exclude_migration_tests,
@@ -653,5 +657,7 @@ class MigrationLinter(object):
error += sql_errors
if sql_ignored:
ignored += sql_ignored
+ if sql_warnings:
+ warning += sql_warnings
return error, ignored, warning
diff --git a/django_migration_linter/sql_analyser/analyser.py b/django_migration_linter/sql_analyser/analyser.py
index d69f873..d98610a 100644
--- a/django_migration_linter/sql_analyser/analyser.py
+++ b/django_migration_linter/sql_analyser/analyser.py
@@ -29,4 +29,4 @@ def analyse_sql_statements(
):
sql_analyser = get_sql_analyser(database_vendor, exclude_migration_tests)
sql_analyser.analyse(sql_statements)
- return sql_analyser.errors, sql_analyser.ignored
+ return sql_analyser.errors, sql_analyser.ignored, sql_analyser.warnings
diff --git a/django_migration_linter/sql_analyser/base.py b/django_migration_linter/sql_analyser/base.py
index 4b60ebf..584ebbc 100644
--- a/django_migration_linter/sql_analyser/base.py
+++ b/django_migration_linter/sql_analyser/base.py
@@ -48,24 +48,28 @@ class BaseAnalyser(object):
or re.search("ALTER TABLE .* RENAME TO", sql),
"msg": "RENAMING tables",
"mode": "one_liner",
+ "type": "error",
},
{
"code": "NOT_NULL",
"fn": has_not_null_column,
"msg": "NOT NULL constraint on columns",
"mode": "transaction",
+ "type": "error",
},
{
"code": "DROP_COLUMN",
"fn": lambda sql, **kw: re.search("DROP COLUMN", sql),
"msg": "DROPPING columns",
"mode": "one_liner",
+ "type": "error",
},
{
"code": "DROP_TABLE",
"fn": lambda sql, **kw: sql.startswith("DROP TABLE"),
"msg": "DROPPING table",
"mode": "one_liner",
+ "type": "error",
},
{
"code": "RENAME_COLUMN",
@@ -73,6 +77,7 @@ class BaseAnalyser(object):
or re.search("ALTER TABLE .* RENAME COLUMN", sql),
"msg": "RENAMING columns",
"mode": "one_liner",
+ "type": "error",
},
{
"code": "ALTER_COLUMN",
@@ -84,12 +89,14 @@ class BaseAnalyser(object):
"You may ignore this migration.)"
),
"mode": "one_liner",
+ "type": "error",
},
{
"code": "ADD_UNIQUE",
"fn": has_add_unique,
"msg": "ADDING unique constraint",
"mode": "transaction",
+ "type": "error",
},
]
@@ -98,6 +105,7 @@ class BaseAnalyser(object):
def __init__(self, exclude_migration_tests):
self.exclude_migration_tests = exclude_migration_tests or []
self.errors = []
+ self.warnings = []
self.ignored = []
self.migration_tests = update_migration_tests(
self.base_migration_tests, self.migration_tests
@@ -113,21 +121,26 @@ class BaseAnalyser(object):
@property
def one_line_migration_tests(self):
- return [test for test in self.migration_tests if test["mode"] == "one_liner"]
+ return (test for test in self.migration_tests if test["mode"] == "one_liner")
@property
def transaction_migration_tests(self):
- return [test for test in self.migration_tests if test["mode"] == "transaction"]
+ return (test for test in self.migration_tests if test["mode"] == "transaction")
def _test_sql(self, test, sql):
if test["fn"](sql, errors=self.errors):
- err = self.build_error_dict(migration_test=test, sql_statement=sql)
if test["code"] in self.exclude_migration_tests:
- logger.debug("Testing %s -- IGNORED", sql)
- self.ignored.append(err)
+ action = "IGNORED"
+ list_to_add = self.ignored
+ elif test["type"] == "warning":
+ action = "WARNING"
+ list_to_add = self.warnings
else:
- logger.debug("Testing %s -- ERROR", sql)
- self.errors.append(err)
+ action = "ERROR"
+ list_to_add = self.errors
+ logger.debug("Testing %s -- %s", sql, action)
+ err = self.build_error_dict(migration_test=test, sql_statement=sql)
+ list_to_add.append(err)
else:
logger.debug("Testing %s -- PASSED", sql)
diff --git a/django_migration_linter/sql_analyser/mysql.py b/django_migration_linter/sql_analyser/mysql.py
index 4178c32..487a85e 100644
--- a/django_migration_linter/sql_analyser/mysql.py
+++ b/django_migration_linter/sql_analyser/mysql.py
@@ -11,6 +11,7 @@ class MySqlAnalyser(BaseAnalyser):
"ALTER TABLE .* MODIFY .* (?!NULL);?$", sql
),
"mode": "one_liner",
+ "type": "error",
}
]
diff --git a/django_migration_linter/sql_analyser/postgresql.py b/django_migration_linter/sql_analyser/postgresql.py
index 5da16c2..140aba3 100644
--- a/django_migration_linter/sql_analyser/postgresql.py
+++ b/django_migration_linter/sql_analyser/postgresql.py
@@ -1,5 +1,31 @@
+import re
+
from .base import BaseAnalyser
class PostgresqlAnalyser(BaseAnalyser):
- migration_tests = []
+ migration_tests = [
+ {
+ "code": "CREATE_INDEX",
+ "fn": lambda sql, **kw: re.search("CREATE (UNIQUE )?INDEX", sql)
+ and not re.search("INDEX CONCURRENTLY", sql),
+ "msg": "CREATE INDEX locks table",
+ "mode": "one_liner",
+ "type": "warning",
+ },
+ {
+ "code": "DROP_INDEX",
+ "fn": lambda sql, **kw: re.search("DROP INDEX", sql)
+ and not re.search("INDEX CONCURRENTLY", sql),
+ "msg": "DROP INDEX locks table",
+ "mode": "one_liner",
+ "type": "warning",
+ },
+ {
+ "code": "REINDEX",
+ "fn": lambda sql, **kw: sql.startswith("REINDEX"),
+ "msg": "REINDEX locks table",
+ "mode": "one_liner",
+ "type": "warning",
+ },
+ ]
diff --git a/django_migration_linter/sql_analyser/sqlite.py b/django_migration_linter/sql_analyser/sqlite.py
index 21503c6..9a947d1 100644
--- a/django_migration_linter/sql_analyser/sqlite.py
+++ b/django_migration_linter/sql_analyser/sqlite.py
@@ -27,6 +27,7 @@ class SqliteAnalyser(BaseAnalyser):
"fn": lambda sql, **kw: re.search("ALTER TABLE .* RENAME TO", sql)
and "__old" not in sql
and "new__" not in sql,
+ "type": "error",
},
{
"code": "DROP_TABLE",
@@ -37,6 +38,7 @@ class SqliteAnalyser(BaseAnalyser):
and not any(sql.startswith("CREATE TABLE") for sql in sql_statements),
"msg": "DROPPING table",
"mode": "transaction",
+ "type": "error",
},
{
"code": "NOT_NULL",
@@ -50,8 +52,14 @@ class SqliteAnalyser(BaseAnalyser):
for sql in sql_statements
),
"mode": "transaction",
+ "type": "error",
+ },
+ {
+ "code": "ADD_UNIQUE",
+ "fn": has_add_unique,
+ "mode": "transaction",
+ "type": "error",
},
- {"code": "ADD_UNIQUE", "fn": has_add_unique, "mode": "transaction"},
]
@staticmethod
diff --git a/docs/incompatibilities.md b/docs/incompatibilities.md
index 4f7b0ef..883a79f 100644
--- a/docs/incompatibilities.md
+++ b/docs/incompatibilities.md
@@ -39,3 +39,6 @@ You can ignore check through the `--exclude-migration-test` option and specifyin
|`RUNPYTHON_MODEL_IMPORT` | Missing apps.get_model() calls for model
|`RUNPYTHON_MODEL_VARIABLE_NAME` | The model variable name is different from the model class itself
|`RUNSQL_REVERSIBLE` | RunSQL data migration is not reversible (missing reverse SQL)
+|`CREATE_INDEX` | (Postgresql specific) Creating an index without the concurrent keyword will lock the table and may generate downtime
+|`DROP_INDEX` | (Postgresql specific) Dropping an index without the concurrent keyword will lock the table and may generate downtime
+|`REINDEX` | (Postgresql specific) Reindexing will lock the table and may generate downtime
|
3YOURMIND__django-migration-linter-186
|
[
{
"changes": {
"added_entities": [
"django_migration_linter/sql_analyser/postgresql.py:has_create_index"
],
"added_modules": [
"django_migration_linter/sql_analyser/postgresql.py:has_create_index"
],
"edited_entities": null,
"edited_modules": [
"django_migration_linter/sql_analyser/postgresql.py:PostgresqlAnalyser"
]
},
"file": "django_migration_linter/sql_analyser/postgresql.py"
}
] |
3YOURMIND/django-migration-linter
|
aef3db3e4198d06c38bc4b0874e72ed657891eea
|
Linter fails on CREATE INDEX when creating a new table
Here is an example `CreateModel` from Django:
```python
migrations.CreateModel(
name='ShipmentMetadataAlert',
fields=[
('deleted_at', models.DateTimeField(blank=True, db_index=True, null=True)),
('created_at', common.fields.CreatedField(default=django.utils.timezone.now, editable=False)),
('updated_at', common.fields.LastModifiedField(default=django.utils.timezone.now, editable=False)),
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, verbose_name='ID')),
('message', models.TextField(blank=True, null=True)),
('level', models.CharField(blank=True, choices=[('HIGH', 'high'), ('MEDIUM', 'medium'), ('LOW', 'low')], max_length=16, null=True)),
('type', models.CharField(blank=True, choices=[('MOBILE_DEVICE_ALERT', 'MOBILE_DEVICE_ALERT'), ('NON_ACTIVE_CARRIER', 'NON_ACTIVE_CARRIER'), ('OTHER', 'OTHER')], max_length=32, null=True)),
('subtype', models.CharField(blank=True, choices=[('DRIVER_PERMISSIONS', 'DRIVER_PERMISSIONS'), ('DRIVER_LOCATION', 'DRIVER_LOCATION'), ('OTHER', 'OTHER')], max_length=32, null=True)),
('occurred_at', models.DateTimeField(null=True)),
('clear_alert_job_id', models.UUIDField(default=None, null=True)),
('metadata', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='alerts', to='shipments.ShipmentMetadata')),
],
options={
'abstract': False,
}
)
```
Here are the SQL statements that this spits out in `sqlmigrate`:
```sql
BEGIN;
--
-- Create model ShipmentMetadataAlert
--
CREATE TABLE "shipments_shipmentmetadataalert" ("deleted_at" timestamp with time zone NULL, "created_at" timestamp with time zone NOT NULL, "updated_at" timestamp with time zone NOT NULL, "id" uuid NOT NULL PRIMARY KEY, "message" text NULL, "level" varchar(16) NULL, "type" varchar(32) NULL, "subtype" varchar(32) NULL, "occurred_at" timestamp with time zone NULL, "clear_alert_job_id" uuid NULL, "metadata_id" uuid NOT NULL);
ALTER TABLE "shipments_shipmentmetadataalert" ADD CONSTRAINT "shipments_shipmentme_metadata_id_f20850e8_fk_shipments" FOREIGN KEY ("metadata_id") REFERENCES "shipments_shipmentmetadata" ("id") DEFERRABLE INITIALLY DEFERRED;
CREATE INDEX "shipments_shipmentmetadataalert_deleted_at_c9a93342" ON "shipments_shipmentmetadataalert" ("deleted_at");
CREATE INDEX "shipments_shipmentmetadataalert_metadata_id_f20850e8" ON "shipments_shipmentmetadataalert" ("metadata_id");
COMMIT;
```
This is an error from the linter as it outputs the error `CREATE INDEX locks table`. But the table is being created within the migration, it just needs to recognize that.
It seems like the `CREATE INDEX` detection should work the same way that the `ADD_UNIQUE` detection works where it detects that the create table is happening in the same migration:
https://github.com/3YOURMIND/django-migration-linter/blob/db71a9db23746f64d41d681f3fecb9b066c87338/django_migration_linter/sql_analyser/base.py#L26-L40
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d1ec8e5..15fefc0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,7 +1,8 @@
-## 4.0.0
+## 4.0.0 (unreleased)
- Drop support for Python 2.7 and 3.5
- Drop support for Django 1.11, 2.0, 2.1, 3.0
+- Fix index creation detection when table is being created in the transaction (issue #178)
## 3.0.1
diff --git a/django_migration_linter/sql_analyser/postgresql.py b/django_migration_linter/sql_analyser/postgresql.py
index 140aba3..3eb18a5 100644
--- a/django_migration_linter/sql_analyser/postgresql.py
+++ b/django_migration_linter/sql_analyser/postgresql.py
@@ -3,14 +3,32 @@ import re
from .base import BaseAnalyser
+def has_create_index(sql_statements, **kwargs):
+ regex_result = None
+ for sql in sql_statements:
+ regex_result = re.search(r"CREATE (UNIQUE )?INDEX.*ON (.*) \(", sql)
+ if re.search("INDEX CONCURRENTLY", sql):
+ regex_result = None
+ elif regex_result:
+ break
+ if not regex_result:
+ return False
+
+ concerned_table = regex_result.group(2)
+ table_is_added_in_transaction = any(
+ sql.startswith("CREATE TABLE {}".format(concerned_table))
+ for sql in sql_statements
+ )
+ return not table_is_added_in_transaction
+
+
class PostgresqlAnalyser(BaseAnalyser):
migration_tests = [
{
"code": "CREATE_INDEX",
- "fn": lambda sql, **kw: re.search("CREATE (UNIQUE )?INDEX", sql)
- and not re.search("INDEX CONCURRENTLY", sql),
+ "fn": has_create_index,
"msg": "CREATE INDEX locks table",
- "mode": "one_liner",
+ "mode": "transaction",
"type": "warning",
},
{
|
3YOURMIND__django-migration-linter-222
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"django_migration_linter/migration_linter.py:MigrationLinter.get_runpython_model_import_issues"
],
"edited_modules": [
"django_migration_linter/migration_linter.py:MigrationLinter"
]
},
"file": "django_migration_linter/migration_linter.py"
}
] |
3YOURMIND/django-migration-linter
|
3baf9487bde6ae27c3ba7623a410ab6c39bb0584
|
Linter failing when using django 'through'
### through doc
https://docs.djangoproject.com/en/4.0/ref/models/fields/#django.db.models.ManyToManyField.through
### Example code
```
def forwards_func(apps, schema_editor):
Question = apps.get_model("solution", "Question")
...
Question.many_to_may.through.objects.bulk_create(...) <- this line?
...
```
### Example Error
```
(fs_solution, 0002_my_migration)... ERR (cached)
'forwards_func': Could not find an 'apps.get_model("...", "through")' call. Importing the model directly is incorrect for data migrations.
```
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a1b5213..300fe00 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 4.1.1 (unreleased)
+
+- Fixed `RunPython` model import check when using a `through` object like `MyModel.many_to_many.through.objects.filter(...)` (issue #218)
+
## 4.1.0
- Allow configuring logging for `makemigrations` command and unify behaviour with `lintmigrations` (issue #207)
diff --git a/django_migration_linter/migration_linter.py b/django_migration_linter/migration_linter.py
index f4b1695..99e72f9 100644
--- a/django_migration_linter/migration_linter.py
+++ b/django_migration_linter/migration_linter.py
@@ -531,7 +531,7 @@ class MigrationLinter(object):
@staticmethod
def get_runpython_model_import_issues(code):
- model_object_regex = re.compile(r"[^a-zA-Z]?([a-zA-Z0-9]+?)\.objects")
+ model_object_regex = re.compile(r"[^a-zA-Z0-9._]?([a-zA-Z0-9._]+?)\.objects")
function_name = code.__name__
source_code = inspect.getsource(code)
@@ -539,6 +539,7 @@ class MigrationLinter(object):
called_models = model_object_regex.findall(source_code)
issues = []
for model in called_models:
+ model = model.split(".", 1)[0]
has_get_model_call = (
re.search(
r"{}.*= +\w+\.get_model\(".format(model),
|
3YOURMIND__django-migration-linter-258
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"django_migration_linter/sql_analyser/base.py:has_not_null_column"
],
"edited_modules": [
"django_migration_linter/sql_analyser/base.py:has_not_null_column"
]
},
"file": "django_migration_linter/sql_analyser/base.py"
}
] |
3YOURMIND/django-migration-linter
|
366d16b01a72d0baa54fef55761d846b0f05b8dd
|
Adding an index with a NOT NULL condition incorrectly triggers NOT_NULL rule
Adding an index with a `WHERE` clause including `NOT NULL` gets flagged as a `NOT NULL constraint on columns` error.
## Steps to reproduce
The follow migration operation:
```python
AddIndexConcurrently(
model_name="prediction",
index=models.Index(
condition=models.Q(
("data_deleted_at__isnull", True),
("delete_data_after__isnull", False),
),
fields=["delete_data_after"],
name="delete_data_after_idx",
),
),
```
Generates the following SQL:
```sql
CREATE INDEX CONCURRENTLY "delete_data_after_idx" ON "models_prediction" ("delete_data_after") WHERE ("data_deleted_at" IS NULL AND "delete_data_after" IS NOT NULL);
```
When linted this is flagged as an error because of the `NOT NULL`, when it ought to be a safe operation.
## Investigation
Looking at the condition used for this rule, I think it might just need to permit `CREATE INDEX` requests:
```python
re.search("(?<!DROP )NOT NULL", sql) and not sql.startswith("CREATE TABLE") and not sql.startswith("CREATE INDEX")
```
https://github.com/3YOURMIND/django-migration-linter/blob/202a6d9d5dea83528cb52fd7481a5a0565cc6f83/django_migration_linter/sql_analyser/base.py#L43
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3069d91..beafd65 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,10 +4,21 @@
Instead, the linter crashes and lets the `sqlmigrate` error raise, in order to avoid letting a problematic migration pass.
One common reason for such an error is the SQL generation which requires the database to be actually migrated in order to fetch actual constraint names from it.
The crash is a sign to double-check the migration. But if you are certain the migration is safe, you can ignore it (issue #209)
+
+Features:
+
- Fixed `RunPython` model import check when using a `through` object like `MyModel.many_to_many.through.objects.filter(...)` (issue #218)
- Mark the `IgnoreMigration` operation as `elidable=True`
+
+Bug:
+
+- Don't detect not nullable field on partial index creation (issue #250)
+
+Miscellaneous:
+
- Add support for Python 3.11
- Add support for Django 4.1
+- Add support for Django 4.2
- Drop support for Django 2.2
- Internally rename "migration tests" to "migration checks"
- Add dataclasses internally instead of custom dicts
diff --git a/django_migration_linter/sql_analyser/base.py b/django_migration_linter/sql_analyser/base.py
index 2fa0646..131652e 100644
--- a/django_migration_linter/sql_analyser/base.py
+++ b/django_migration_linter/sql_analyser/base.py
@@ -40,7 +40,8 @@ def has_not_null_column(sql_statements: list[str], **kwargs) -> bool:
ends_with_default = False
return (
any(
- re.search("(?<!DROP )NOT NULL", sql) and not sql.startswith("CREATE TABLE")
+ re.search("(?<!DROP )NOT NULL", sql)
+ and not (sql.startswith("CREATE TABLE") or sql.startswith("CREATE INDEX"))
for sql in sql_statements
)
and ends_with_default is False
|
3YOURMIND__django-migration-linter-47
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"django_migration_linter/migration_linter.py:_main"
],
"edited_modules": [
"django_migration_linter/migration_linter.py:_main"
]
},
"file": "django_migration_linter/migration_linter.py"
}
] |
3YOURMIND/django-migration-linter
|
fbf0f4419336fcb1235fa57f5575ad2593354e44
|
Add --version option
Pretty straightforward. Have a `--version` that prints the current version of the linter.
|
diff --git a/django_migration_linter/migration_linter.py b/django_migration_linter/migration_linter.py
index f9c0ab1..03c2054 100644
--- a/django_migration_linter/migration_linter.py
+++ b/django_migration_linter/migration_linter.py
@@ -20,7 +20,7 @@ from subprocess import Popen, PIPE
import sys
from .cache import Cache
-from .constants import DEFAULT_CACHE_PATH, MIGRATION_FOLDER_NAME
+from .constants import DEFAULT_CACHE_PATH, MIGRATION_FOLDER_NAME, __version__
from .migration import Migration
from .utils import is_directory, is_django_project, clean_bytes_to_str
from .sql_analyser import analyse_sql_statements
@@ -287,6 +287,9 @@ def _main():
action="store_true",
help="print more information during execution",
)
+ parser.add_argument(
+ "--version", "-V", action="version", version="%(prog)s {}".format(__version__)
+ )
parser.add_argument(
"--database",
type=str,
|
42DIGITAL__bqtools-11
|
[
{
"changes": {
"added_entities": [
"fourtytwo/bqtools/__init__.py:BQTable.__repr__"
],
"added_modules": null,
"edited_entities": [
"fourtytwo/bqtools/__init__.py:BQTable.__eq__",
"fourtytwo/bqtools/__init__.py:BQTable._set_schema",
"fourtytwo/bqtools/__init__.py:BQTable._set_data",
"fourtytwo/bqtools/__init__.py:BQTable.append"
],
"edited_modules": [
"fourtytwo/bqtools/__init__.py:BQTable"
]
},
"file": "fourtytwo/bqtools/__init__.py"
}
] |
42DIGITAL/bqtools
|
98ce0de1d976f33cf04217ef50f864f74bd5ed52
|
append data to empty table.data fails - required for schema_only
```python
>>> from fourtytwo import bqtools
>>> table = bqtools.read_bq(table_ref='project.dataset.table', schema_only=True)
>>> table.append([])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/bqtools/fourtytwo/bqtools/__init__.py", line 224, in append
for index in range(len(data)):
TypeError: object of type 'NoneType' has no len()
```
File "/bqtools/fourtytwo/bqtools/\_\_init\_\_.py", line 224, in append:
```python
def append(self, rows):
append_columns = _rows_to_columns(rows, self.schema)
data = self.data
for index in range(len(data)):
data[index] += append_columns[index]
```
Probably replace range(len(data)) with range(len(self.schema))
|
diff --git a/fourtytwo/bqtools/__init__.py b/fourtytwo/bqtools/__init__.py
index 9542242..136e18c 100644
--- a/fourtytwo/bqtools/__init__.py
+++ b/fourtytwo/bqtools/__init__.py
@@ -104,14 +104,21 @@ class BQTable(object):
def __init__(self, schema=None, data=None):
if DEBUG:
logging.debug('bqtools.BQTable.__init__')
-
+
self.schema = schema if schema else []
self.data = data if data else []
- #def __repr__(self):
- # pass
+ def __repr__(self):
+ schema_shape = len(self.schema)
+ if len(self.data) > 0:
+ data_shape = (len(self.data[0]), len(self.data))
+ else:
+ data_shape = (0,)
+ return '<bqtools.BQTable(shape_schema={}, shape_data={})>'.format(schema_shape, data_shape)
def __eq__(self, other):
+ if not isinstance(other, BQTable):
+ raise TypeError('other must be of type BQTable')
return self.schema == other.schema and self.data == other.data
def __setattr__(self, name, value):
@@ -146,13 +153,18 @@ class BQTable(object):
new_schema.append(bigquery.SchemaField(**field))
if self.schema and new_schema and new_schema != self.schema:
- data = self._move_columns(new_schema)
+ # TODO Handle appends to schema
+ # if len(new_schema) > len(self.schema)
+ data = self._move_columns(schema=new_schema)
else:
data = self.data
-
- data = self._typecheck(schema=new_schema, data=data)
- object.__setattr__(self, '_schema', new_schema)
- object.__setattr__(self, '_data', data)
+
+ if data:
+ data = self._typecheck(schema=new_schema, data=data)
+ object.__setattr__(self, '_schema', new_schema)
+ object.__setattr__(self, '_data', data)
+ else:
+ object.__setattr__(self, '_schema', new_schema)
def _set_data(self, data):
if DEBUG:
@@ -160,14 +172,14 @@ class BQTable(object):
if data and isinstance(data, list):
if isinstance(data[0], dict):
- data = _rows_to_columns(data, self.schema)
- data = self._typecheck(data=data)
+ data = _rows_to_columns(rows=data, schema=self.schema)
+ data = self._typecheck(data=data)
object.__setattr__(self, '_data', data)
def _move_columns(self, schema):
if DEBUG:
logging.debug('bqtools.BQTable._move_columns()')
-
+
old_field_names = [field.name for field in self.schema]
new_field_names = [field.name for field in schema]
column_order = [old_field_names.index(name) for name in new_field_names]
@@ -219,8 +231,8 @@ class BQTable(object):
self._rename_columns(mapping=columns)
def append(self, rows):
- append_columns = _rows_to_columns(rows, self.schema)
- data = self.data
+ append_columns = _rows_to_columns(rows=rows, schema=self.schema)
+ data = self.data if self.data else [[] for n in range(len(self.schema))]
for index in range(len(data)):
data[index] += append_columns[index]
self.data = data
|
4Catalyzer__flask-resty-248
|
[
{
"changes": {
"added_entities": [
"flask_resty/compat.py:_strict_run",
"flask_resty/compat.py:schema_load",
"flask_resty/compat.py:schema_dump"
],
"added_modules": [
"flask_resty/compat.py:_strict_run",
"flask_resty/compat.py:schema_load",
"flask_resty/compat.py:schema_dump"
],
"edited_entities": null,
"edited_modules": null
},
"file": "flask_resty/compat.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"flask_resty/fields.py:RelatedItem._deserialize"
],
"edited_modules": [
"flask_resty/fields.py:RelatedItem"
]
},
"file": "flask_resty/fields.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"flask_resty/view.py:ApiView.serialize",
"flask_resty/view.py:ApiView.deserialize",
"flask_resty/view.py:ApiView.request_args",
"flask_resty/view.py:ApiView.deserialize_args"
],
"edited_modules": [
"flask_resty/view.py:ApiView"
]
},
"file": "flask_resty/view.py"
}
] |
4Catalyzer/flask-resty
|
ac43163453fab1b23434d29f71a3c1b34b251c0a
|
Support Marshmallow 3
[From here](https://marshmallow.readthedocs.io/en/latest/upgrading.html#schemas-are-always-strict):
> Schema().load and Schema().dump don’t return a (data, errors) tuple any more. Only data is returned.
|
diff --git a/.travis.yml b/.travis.yml
index 708e4a9..00ffb59 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -4,16 +4,21 @@ services:
- postgresql
matrix:
include:
- - { python: "2.7", env: "TOXENV=py-full DATABASE_URL=postgres://localhost/travis_ci_test" }
- - { python: "3.5", env: "TOXENV=py-full DATABASE_URL=postgres://localhost/travis_ci_test" }
- - { python: "3.6", env: "TOXENV=py-full DATABASE_URL=postgres://localhost/travis_ci_test" }
- - { python: "3.7", env: "TOXENV=py-full DATABASE_URL=postgres://localhost/travis_ci_test" }
+ - { python: "2.7", env: "TOXENV=py-full-marshmallow2 DATABASE_URL=postgres://localhost/travis_ci_test" }
+ - { python: "3.5", env: "TOXENV=py-full-marshmallow2 DATABASE_URL=postgres://localhost/travis_ci_test" }
+ - { python: "3.6", env: "TOXENV=py-full-marshmallow2 DATABASE_URL=postgres://localhost/travis_ci_test" }
+ - { python: "3.7", env: "TOXENV=py-full-marshmallow2 DATABASE_URL=postgres://localhost/travis_ci_test" }
- - { python: "2.7", env: "TOXENV=py-base" }
- - { python: "3.6", env: "TOXENV=py-base" }
+ - { python: "2.7", env: "TOXENV=py-base-marshmallow2" }
+ - { python: "3.6", env: "TOXENV=py-base-marshmallow2" }
- - { python: "pypy2.7-6.0", env: "TOXENV=py-full" }
- - { python: "pypy3.5-6.0", env: "TOXENV=py-full" }
+ - { python: "2.7", env: "TOXENV=py-full-marshmallow3" }
+ - { python: "3.6", env: "TOXENV=py-full-marshmallow3" }
+ - { python: "2.7", env: "TOXENV=py-base-marshmallow3" }
+ - { python: "3.6", env: "TOXENV=py-base-marshmallow3" }
+
+ - { python: "pypy2.7-6.0", env: "TOXENV=py-full-marshmallow2" }
+ - { python: "pypy3.5-6.0", env: "TOXENV=py-full-marshmallow2" }
cache:
directories:
diff --git a/README.md b/README.md
index 91e22ef..3da8ccd 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# Flask-RESTy [![Travis][build-badge]][build] [![PyPI][pypi-badge]][pypi]
+# Flask-RESTy [![Travis][build-badge]][build] [![PyPI][pypi-badge]][pypi] [![marshmallow 2/3 compatible][marshmallow-badge]][marshmallow-upgrading]
Building blocks for REST APIs for [Flask](http://flask.pocoo.org/).
[![Codecov][codecov-badge]][codecov]
@@ -79,3 +79,6 @@ class WidgetSchema(TableSchema):
[codecov-badge]: https://img.shields.io/codecov/c/github/4Catalyzer/flask-resty/master.svg
[codecov]: https://codecov.io/gh/4Catalyzer/flask-resty
+
+[marshmallow-badge]: https://badgen.net/badge/marshmallow/2,3?list=1
+[marshmallow-upgrading]: https://marshmallow.readthedocs.io/en/latest/upgrading.html
diff --git a/flask_resty/compat.py b/flask_resty/compat.py
index 76a767c..4f86e88 100644
--- a/flask_resty/compat.py
+++ b/flask_resty/compat.py
@@ -1,8 +1,11 @@
import sys
+import marshmallow
+
# -----------------------------------------------------------------------------
PY2 = int(sys.version_info[0]) == 2
+MA2 = int(marshmallow.__version__[0]) == 2
# -----------------------------------------------------------------------------
@@ -10,3 +13,23 @@ if PY2:
basestring = basestring # noqa: F821
else:
basestring = (str, bytes)
+
+
+def _strict_run(method, obj_or_data, **kwargs):
+ result = method(obj_or_data, **kwargs)
+ if MA2: # Make marshmallow 2 schemas behave like marshmallow 3
+ data, errors = result
+ if errors:
+ raise marshmallow.ValidationError(errors, data=data)
+ else:
+ data = result
+
+ return data
+
+
+def schema_load(schema, in_data, **kwargs):
+ return _strict_run(schema.load, in_data, **kwargs)
+
+
+def schema_dump(schema, obj, **kwargs):
+ return _strict_run(schema.dump, obj, **kwargs)
diff --git a/flask_resty/fields.py b/flask_resty/fields.py
index 1c2f370..e1ee7f2 100644
--- a/flask_resty/fields.py
+++ b/flask_resty/fields.py
@@ -1,19 +1,18 @@
-from marshmallow import fields, ValidationError
+from marshmallow import fields
import marshmallow.utils
+from .compat import schema_load
+
# -----------------------------------------------------------------------------
class RelatedItem(fields.Nested):
- def _deserialize(self, value, attr, data):
+ def _deserialize(self, value, *args, **kwargs):
if self.many and not marshmallow.utils.is_collection(value):
self.fail('type', input=value, type=value.__class__.__name__)
# Do partial load of related item, as we only need the id.
- data, errors = self.schema.load(value, partial=True)
- if errors:
- raise ValidationError(errors, data=data)
- return data
+ return schema_load(self.schema, value, partial=True)
def _validate_missing(self, value):
# Do not display detailed error data on required fields in nested
diff --git a/flask_resty/view.py b/flask_resty/view.py
index fc6043b..a263919 100644
--- a/flask_resty/view.py
+++ b/flask_resty/view.py
@@ -2,7 +2,7 @@ import itertools
import flask
from flask.views import MethodView
-from marshmallow import fields
+from marshmallow import fields, ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Load
from sqlalchemy.orm.exc import NoResultFound
@@ -11,6 +11,7 @@ from werkzeug.exceptions import NotFound
from . import meta
from .authentication import NoOpAuthentication
from .authorization import NoOpAuthorization
+from .compat import MA2, schema_dump, schema_load
from .decorators import request_cached_property
from .exceptions import ApiError
from .spec import ApiViewDeclaration, ModelViewDeclaration
@@ -36,7 +37,7 @@ class ApiView(MethodView):
return super(ApiView, self).dispatch_request(*args, **kwargs)
def serialize(self, item, **kwargs):
- return self.serializer.dump(item, **kwargs).data
+ return schema_dump(self.serializer, item, **kwargs)
@settable_property
def serializer(self):
@@ -111,11 +112,12 @@ class ApiView(MethodView):
return data_raw
def deserialize(self, data_raw, expected_id=None, **kwargs):
- data, errors = self.deserializer.load(data_raw, **kwargs)
- if errors:
+ try:
+ data = schema_load(self.deserializer, data_raw, **kwargs)
+ except ValidationError as e:
raise ApiError(422, *(
self.format_validation_error(error)
- for error in iter_validation_errors(errors)
+ for error in iter_validation_errors(e.messages)
))
self.validate_request_id(data, expected_id)
@@ -170,8 +172,11 @@ class ApiView(MethodView):
for field_name, field in self.args_schema.fields.items():
if field_name in args:
args_key = field_name
- elif field.load_from and field.load_from in args:
+ elif MA2 and field.load_from and field.load_from in args:
args_key = field.load_from
+ elif not MA2 and field.data_key and field.data_key in args:
+ args_key = field.data_key
+ field_name = field.data_key
else:
continue
@@ -187,11 +192,12 @@ class ApiView(MethodView):
return isinstance(field, fields.List)
def deserialize_args(self, data_raw, **kwargs):
- data, errors = self.args_schema.load(data_raw, **kwargs)
- if errors:
+ try:
+ data = schema_load(self.args_schema, data_raw, **kwargs)
+ except ValidationError as e:
raise ApiError(422, *(
self.format_parameter_validation_error(message, parameter)
- for parameter, messages in errors.items()
+ for parameter, messages in e.messages.items()
for message in messages
))
diff --git a/tox.ini b/tox.ini
index 2a92706..8f5f3b3 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,9 +1,12 @@
[tox]
-envlist = py{27,35,37}-{base,full}
+envlist = py{27,35,37}-{base,full}-marshmallow{2,3}
[testenv]
passenv = DATABASE_URL
usedevelop = True
+deps =
+ marshmallow2: marshmallow>=2.2.0,<3.0.0
+ marshmallow3: marshmallow>=3.0.0rc5,<4.0.0
extras =
tests
full: apispec,jwt
|
4degrees__clique-26
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"source/clique/collection.py:Collection.format"
],
"edited_modules": [
"source/clique/collection.py:Collection"
]
},
"file": "source/clique/collection.py"
}
] |
4degrees/clique
|
a89507304acce5931f940c34025a6547fa8227b5
|
collection.format hits maximum recursion depth for collections with lots of holes.
The following code gives an example.
```python
paths = ["name.{0:04d}.jpg".format(x) for x in range(2000)[::2]]
collection = clique.assemble(paths)[0][0]
collection.format("{head}####{tail}")
```
|
diff --git a/source/clique/collection.py b/source/clique/collection.py
index 0c3b296..db9276c 100644
--- a/source/clique/collection.py
+++ b/source/clique/collection.py
@@ -251,15 +251,25 @@ class Collection(object):
else:
data['padding'] = '%d'
- if self.indexes:
+ if '{holes}' in pattern:
data['holes'] = self.holes().format('{ranges}')
+ if '{range}' in pattern or '{ranges}' in pattern:
indexes = list(self.indexes)
- if len(indexes) == 1:
+ indexes_count = len(indexes)
+
+ if indexes_count == 0:
+ data['range'] = ''
+
+ elif indexes_count == 1:
data['range'] = '{0}'.format(indexes[0])
+
else:
- data['range'] = '{0}-{1}'.format(indexes[0], indexes[-1])
+ data['range'] = '{0}-{1}'.format(
+ indexes[0], indexes[-1]
+ )
+ if '{ranges}' in pattern:
separated = self.separate()
if len(separated) > 1:
ranges = [collection.format('{range}')
@@ -270,11 +280,6 @@ class Collection(object):
data['ranges'] = ', '.join(ranges)
- else:
- data['holes'] = ''
- data['range'] = ''
- data['ranges'] = ''
-
return pattern.format(**data)
def is_contiguous(self):
|
6si__shipwright-79
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"shipwright/base.py:Shipwright.__init__",
"shipwright/base.py:Shipwright._build"
],
"edited_modules": [
"shipwright/base.py:Shipwright"
]
},
"file": "shipwright/base.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"shipwright/build.py:do_build",
"shipwright/build.py:build"
],
"edited_modules": [
"shipwright/build.py:do_build",
"shipwright/build.py:build"
]
},
"file": "shipwright/build.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"shipwright/cli.py:argparser",
"shipwright/cli.py:old_style_arg_dict",
"shipwright/cli.py:run"
],
"edited_modules": [
"shipwright/cli.py:argparser",
"shipwright/cli.py:old_style_arg_dict",
"shipwright/cli.py:run"
]
},
"file": "shipwright/cli.py"
}
] |
6si/shipwright
|
7d3ccf39acc79bb6d33a787e773227358764dd2c
|
docker pull all images for current branch and master before building
Because our buildserver forgets the docker cache between builds we pull the previous build for all the images.
it would be great if we could get shipwright to do it.
Otherwise a command like "shipright images" which lists all the images that shipwright *would* build would let us write our own command to do this.
|
diff --git a/CHANGES.rst b/CHANGES.rst
index f034d37..89cf5f1 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,7 +1,8 @@
0.5.1 (unreleased)
------------------
-- Nothing changed yet.
+- Add --pull-cache to pull images from repository before building.
+ (`Issue #49 <https://github.com/6si/shipwright/issues/49>`_).
0.5.0 (2016-08-19)
diff --git a/shipwright/base.py b/shipwright/base.py
index 213d597..421f1af 100644
--- a/shipwright/base.py
+++ b/shipwright/base.py
@@ -4,10 +4,11 @@ from . import build, dependencies, docker, push
class Shipwright(object):
- def __init__(self, source_control, docker_client, tags):
+ def __init__(self, source_control, docker_client, tags, pull_cache=False):
self.source_control = source_control
self.docker_client = docker_client
self.tags = tags
+ self._pull_cache = pull_cache
def targets(self):
return self.source_control.targets()
@@ -18,7 +19,10 @@ class Shipwright(object):
return self._build(this_ref_str, targets)
def _build(self, this_ref_str, targets):
- for evt in build.do_build(self.docker_client, this_ref_str, targets):
+ client = self.docker_client
+ pull_cache = self._pull_cache
+ ref = this_ref_str
+ for evt in build.do_build(client, ref, targets, pull_cache):
yield evt
# now that we're built and tagged all the images.
diff --git a/shipwright/build.py b/shipwright/build.py
index 707d4f9..4ee1558 100644
--- a/shipwright/build.py
+++ b/shipwright/build.py
@@ -13,7 +13,7 @@ def _merge(d1, d2):
return d
-def do_build(client, build_ref, targets):
+def do_build(client, build_ref, targets, pull_cache):
"""
Generic function for building multiple images while
notifying a callback function with output produced.
@@ -39,11 +39,11 @@ def do_build(client, build_ref, targets):
parent_ref = None
if target.parent:
parent_ref = build_index.get(target.parent)
- for evt in build(client, parent_ref, target):
+ for evt in build(client, parent_ref, target, pull_cache):
yield evt
-def build(client, parent_ref, image):
+def build(client, parent_ref, image, pull_cache):
"""
builds the given image tagged with <build_ref> and ensures that
it depends on it's parent if it's part of this build group (shares
@@ -62,7 +62,25 @@ def build(client, parent_ref, image):
built_tags = docker.last_built_from_docker(client, image.name)
if image.ref in built_tags:
- return []
+ return
+
+ if pull_cache:
+ pull_evts = client.pull(
+ repository=image.name,
+ tag=image.ref,
+ stream=True,
+ )
+
+ failed = False
+ for evt in pull_evts:
+ event = process_event_(evt)
+ if 'error' in event:
+ failed = True
+ else:
+ yield event
+
+ if not failed:
+ return
build_evts = client.build(
fileobj=mkcontext(parent_ref, image.path),
@@ -73,4 +91,5 @@ def build(client, parent_ref, image):
dockerfile=os.path.basename(image.path),
)
- return (process_event_(evt) for evt in build_evts)
+ for evt in build_evts:
+ yield process_event_(evt)
diff --git a/shipwright/cli.py b/shipwright/cli.py
index 24f6f78..82eaf50 100644
--- a/shipwright/cli.py
+++ b/shipwright/cli.py
@@ -109,6 +109,11 @@ def argparser():
help='Build working tree, including uncommited and untracked changes',
action='store_true',
)
+ common.add_argument(
+ '--pull-cache',
+ help='When building try to pull previously built images',
+ action='store_true',
+ )
a_arg(
common, '-d', '--dependants',
help='Build DEPENDANTS and all its dependants',
@@ -157,7 +162,6 @@ def old_style_arg_dict(namespace):
'--exclude': _flatten(ns.exclude),
'--help': False,
'--no-build': getattr(ns, 'no_build', False),
- '--dirty': getattr(ns, 'dirty', False),
'--upto': _flatten(ns.upto),
'--x-assert-hostname': ns.x_assert_hostname,
'-H': ns.docker_host,
@@ -237,8 +241,10 @@ def run(path, arguments, client_cfg, environ, new_style_args=None):
if new_style_args is None:
dirty = False
+ pull_cache = False
else:
dirty = new_style_args.dirty
+ pull_cache = new_style_args.pull_cache
namespace = config['namespace']
name_map = config.get('names', {})
@@ -249,7 +255,7 @@ def run(path, arguments, client_cfg, environ, new_style_args=None):
'to commit these changes, re-run with the --dirty flag.'
)
- sw = Shipwright(scm, client, arguments['tags'])
+ sw = Shipwright(scm, client, arguments['tags'], pull_cache)
command = getattr(sw, command_name)
show_progress = sys.stdout.isatty()
|
AI-SDC__AI-SDC-94
|
[
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"aisdc/attacks/report.py:NumpyArrayEncoder.default"
],
"edited_modules": [
"aisdc/attacks/report.py:NumpyArrayEncoder"
]
},
"file": "aisdc/attacks/report.py"
},
{
"changes": {
"added_entities": null,
"added_modules": null,
"edited_entities": [
"aisdc/attacks/worst_case_attack.py:WorstCaseAttackArgs.__init__",
"aisdc/attacks/worst_case_attack.py:WorstCaseAttack.attack",
"aisdc/attacks/worst_case_attack.py:WorstCaseAttack.attack_from_preds",
"aisdc/attacks/worst_case_attack.py:WorstCaseAttack._prepare_attack_data",
"aisdc/attacks/worst_case_attack.py:WorstCaseAttack.run_attack_reps"
],
"edited_modules": [
"aisdc/attacks/worst_case_attack.py:WorstCaseAttackArgs",
"aisdc/attacks/worst_case_attack.py:WorstCaseAttack"
]
},
"file": "aisdc/attacks/worst_case_attack.py"
}
] |
AI-SDC/AI-SDC
|
a42a2110ade262a7d699d5b71cfccbc787290d5d
|
Add option to include target model error into attacks as a feature
Whether or not the target model classifies an example correctly provides some signal that could be of use to an attacker. We currently do not use this in the attacks, but should include an option to allow it.
The 0/1 loss between the predicted class and the actual class (binary feature) should be added for the worst case attack.
|
diff --git a/aisdc/attacks/report.py b/aisdc/attacks/report.py
index 12b3887..515c709 100644
--- a/aisdc/attacks/report.py
+++ b/aisdc/attacks/report.py
@@ -1,4 +1,5 @@
"""Code for automatic report generation"""
+import abc
import json
import numpy as np
@@ -83,6 +84,8 @@ class NumpyArrayEncoder(json.JSONEncoder):
return int(o)
if isinstance(o, np.int32):
return int(o)
+ if isinstance(o, abc.ABCMeta):
+ return str(o)
return json.JSONEncoder.default(self, o)
diff --git a/aisdc/attacks/worst_case_attack.py b/aisdc/attacks/worst_case_attack.py
index 28e6512..361822c 100644
--- a/aisdc/attacks/worst_case_attack.py
+++ b/aisdc/attacks/worst_case_attack.py
@@ -14,6 +14,7 @@ from typing import Any
import numpy as np
import sklearn
from sklearn.ensemble import RandomForestClassifier
+from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
from aisdc.attacks import metrics, report
@@ -40,7 +41,14 @@ class WorstCaseAttackArgs:
self.__dict__["in_sample_filename"] = None
self.__dict__["out_sample_filename"] = None
self.__dict__["report_name"] = None
+ self.__dict__["include_model_correct_feature"] = False
self.__dict__["sort_probs"] = True
+ self.__dict__["mia_attack_model"] = RandomForestClassifier
+ self.__dict__["mia_attack_model_hyp"] = {
+ "min_samples_split": 20,
+ "min_samples_leaf": 10,
+ "max_depth": 5,
+ }
self.__dict__.update(kwargs)
def __str__(self):
@@ -83,7 +91,20 @@ class WorstCaseAttack(Attack):
"""
train_preds = target_model.predict_proba(dataset.x_train)
test_preds = target_model.predict_proba(dataset.x_test)
- self.attack_from_preds(train_preds, test_preds)
+ train_correct = None
+ test_correct = None
+ if self.args.include_model_correct_feature:
+ train_correct = 1 * (
+ dataset.y_train == target_model.predict(dataset.x_train)
+ )
+ test_correct = 1 * (dataset.y_test == target_model.predict(dataset.x_test))
+
+ self.attack_from_preds(
+ train_preds,
+ test_preds,
+ train_correct=train_correct,
+ test_correct=test_correct,
+ )
def attack_from_prediction_files(self):
"""Start an attack from saved prediction files
@@ -98,7 +119,11 @@ class WorstCaseAttack(Attack):
self.attack_from_preds(train_preds, test_preds)
def attack_from_preds( # pylint: disable=too-many-locals
- self, train_preds: np.ndarray, test_preds: np.ndarray
+ self,
+ train_preds: np.ndarray,
+ test_preds: np.ndarray,
+ train_correct: np.ndarray = None,
+ test_correct: np.ndarray = None,
) -> None:
"""
Runs the attack based upon the predictions in train_preds and test_preds, and the params
@@ -115,7 +140,13 @@ class WorstCaseAttack(Attack):
"""
logger = logging.getLogger("attack-from-preds")
logger.info("Running main attack repetitions")
- self.attack_metrics = self.run_attack_reps(train_preds, test_preds)
+ self.attack_metrics = self.run_attack_reps(
+ train_preds,
+ test_preds,
+ train_correct=train_correct,
+ test_correct=test_correct,
+ )
+
if self.args.n_dummy_reps > 0:
logger.info("Running dummy attack reps")
self.dummy_attack_metrics = []
@@ -130,7 +161,11 @@ class WorstCaseAttack(Attack):
logger.info("Finished running attacks")
def _prepare_attack_data(
- self, train_preds: np.ndarray, test_preds: np.ndarray
+ self,
+ train_preds: np.ndarray,
+ test_preds: np.ndarray,
+ train_correct: np.ndarray = None,
+ test_correct: np.ndarray = None,
) -> tuple[np.ndarray, np.ndarray]:
"""Prepare training data and labels for attack model
Combines the train and test preds into a single numpy array (optionally) sorting each
@@ -143,12 +178,23 @@ class WorstCaseAttack(Attack):
test_preds = -np.sort(-test_preds, axis=1)
logger.info("Creating MIA data")
+
+ if self.args.include_model_correct_feature and train_correct is not None:
+ train_preds = np.hstack((train_preds, train_correct[:, None]))
+ test_preds = np.hstack((test_preds, test_correct[:, None]))
+
mi_x = np.vstack((train_preds, test_preds))
mi_y = np.hstack((np.ones(len(train_preds)), np.zeros(len(test_preds))))
return (mi_x, mi_y)
- def run_attack_reps(self, train_preds: np.ndarray, test_preds: np.ndarray) -> list:
+ def run_attack_reps( # pylint: disable = too-many-locals
+ self,
+ train_preds: np.ndarray,
+ test_preds: np.ndarray,
+ train_correct: np.ndarray = None,
+ test_correct: np.ndarray = None,
+ ) -> list:
"""
Run actual attack reps from train and test predictions
@@ -167,8 +213,9 @@ class WorstCaseAttack(Attack):
self.args.set_param("n_rows_in", len(train_preds))
self.args.set_param("n_rows_out", len(test_preds))
logger = logging.getLogger("attack-reps")
-
- mi_x, mi_y = self._prepare_attack_data(train_preds, test_preds)
+ mi_x, mi_y = self._prepare_attack_data(
+ train_preds, test_preds, train_correct, test_correct
+ )
mia_metrics = []
for rep in range(self.args.n_reps):
@@ -176,13 +223,25 @@ class WorstCaseAttack(Attack):
mi_train_x, mi_test_x, mi_train_y, mi_test_y = train_test_split(
mi_x, mi_y, test_size=self.args.test_prop, stratify=mi_y
)
- attack_classifier = RandomForestClassifier()
+ attack_classifier = self.args.mia_attack_model(
+ **self.args.mia_attack_model_hyp
+ )
attack_classifier.fit(mi_train_x, mi_train_y)
mia_metrics.append(
metrics.get_metrics(attack_classifier, mi_test_x, mi_test_y)
)
+ if self.args.include_model_correct_feature and train_correct is not None:
+ # Compute the Yeom TPR and FPR
+ yeom_preds = mi_test_x[:, -1]
+ tn, fp, fn, tp = confusion_matrix(mi_test_y, yeom_preds).ravel()
+ mia_metrics[-1]["yeom_tpr"] = tp / (tp + fn)
+ mia_metrics[-1]["yeom_fpr"] = fp / (fp + tn)
+ mia_metrics[-1]["yeom_advantage"] = (
+ mia_metrics[-1]["yeom_tpr"] - mia_metrics[-1]["yeom_fpr"]
+ )
+
logger.info("Finished simulating attacks")
return mia_metrics
@@ -554,6 +613,21 @@ def main():
help=("P-value threshold for significance testing. Default = %(default)f"),
)
+ # Not currently possible from the command line as we cannot compute the correctness
+ # of predictions. Possibly to be added in the future
+ # attack_parser.add_argument(
+ # "--include-correct",
+ # action="store",
+ # type=bool,
+ # required=False,
+ # default=False,
+ # dest='include_model_correct_feature',
+ # help=(
+    #         "Whether or not to include an additional feature into the MIA attack model that "
+    #         "holds whether or not the target model made a correct prediction for each example."
+ # ),
+ # )
+
attack_parser.add_argument(
"--sort-probs",
action="store",
|
End of preview. Expand
in Data Studio
README.md exists but content is empty.
- Downloads last month
- 7